Sep 29 16:51:08 crc systemd[1]: Starting Kubernetes Kubelet...
Sep 29 16:51:08 crc restorecon[4575]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Sep 29 16:51:08 crc restorecon[4575]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:08 crc restorecon[4575]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 16:51:08 crc 
restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Sep 29 16:51:08 crc restorecon[4575]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 16:51:08 crc restorecon[4575]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Sep 29 16:51:08 crc 
restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:08 crc restorecon[4575]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
[approximately 250 further restorecon[4575] entries of the same form, Sep 29 16:51:08 to 16:51:09, omitted here; each reads "<path> not reset as customized by admin to system_u:object_r:container_file_t:s0:<category pair>". The paths span: pod 57a731c4-ef35-47a8-b875-bfb08a7f8011 (per-operator catalog directories and catalog.json files in the catalog-content empty-dir volume, alphabetical from fuse-online through web-terminal, plus the pogreb.v1 cache files, etc-hosts, and the extract-utilities, extract-content, and registry-server container dirs; context c7,c13); pod 3cb93b32-e0ae-4377-b9c8-fdb9842c6d59 (node-ca serviceca configmap, etc-hosts, node-ca container dirs; c842,c986); pod 09ae3b1a-e8e7-4524-b54b-61eab6f9239a (oauth-apiserver etcd-serving-ca, trusted-ca-bundle, and audit-policies configmaps, etc-hosts, fix-audit-permissions and oauth-apiserver container dirs; c764,c897); pod 43509403-f426-496e-be36-56cef71462f5 (console-config, trusted-ca-bundle, oauth-serving-cert, and service-ca configmaps, etc-hosts, console container dirs; c0,c25); pod 7583ce53-e0fe-4a16-9e4d-50516596a136 (openshift-controller-manager config, client-ca, and proxy-ca-bundles configmaps, etc-hosts, controller-manager container dir; c14,c22); pod 87cf06ed-a83f-41a7-828d-70653580a8cb (dns config-volume Corefile, etc-hosts, dns and kube-rbac-proxy container dirs; c466,c972); pod 44663579-783b-4372-86d6-acf235a62d72 (dns-node-resolver container dirs); pod 9d4552c7-cd75-42dd-8880-30dd377c49a4 (console-operator config and trusted-ca configmaps, etc-hosts, console-operator container dirs; c5,c25); and pod 1bf7eb37-55a3-4c65-b768-a94c82151e69 (apiserver etcd-serving-ca, config, audit, and image-import-ca configmaps; c336,c787).]
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 
16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Sep 29 16:51:09 crc 
restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 
16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 16:51:09 crc restorecon[4575]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Sep 29 16:51:09 crc restorecon[4575]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Sep 29 16:51:10 crc kubenswrapper[4592]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 16:51:10 crc kubenswrapper[4592]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Sep 29 16:51:10 crc kubenswrapper[4592]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Sep 29 16:51:10 crc kubenswrapper[4592]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Sep 29 16:51:10 crc kubenswrapper[4592]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Sep 29 16:51:10 crc kubenswrapper[4592]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.749577 4592 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755498 4592 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755518 4592 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755525 4592 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755531 4592 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755537 4592 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755544 4592 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755549 4592 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755555 4592 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755560 4592 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755566 4592 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755571 4592 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755575 4592 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755581 4592 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755585 4592 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755590 4592 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755595 4592 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755600 4592 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755604 4592 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755609 4592 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755614 4592 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755620 4592 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755625 4592 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755630 4592 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755635 4592 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755639 4592 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755644 4592 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755649 4592 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755656 4592 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755661 4592 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755665 4592 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755677 4592 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755682 4592 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755687 4592 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755693 4592 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755700 4592 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755705 4592 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755710 4592 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755716 4592 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755721 4592 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755727 4592 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755734 4592 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755740 4592 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755747 4592 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755752 4592 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755757 4592 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755762 4592 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755767 4592 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755772 4592 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755778 4592 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755784 4592 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755789 4592 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755796 4592 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755802 4592 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755808 4592 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755813 4592 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755819 4592 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755824 4592 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755829 4592 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755834 4592 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755840 4592 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755845 4592 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755851 4592 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755855 4592 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755861 4592 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755865 4592 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755872 4592 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
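Each of these W0929 feature_gate.go:330 sweeps is the upstream kubelet rejecting gate names it does not register: OpenShift passes its own cluster-level gates (NewOLM, GatewayAPI, and so on) through the shared configuration, and the kubelet warns once per unknown gate on every parse pass. A sketch that tallies the warnings from a saved copy of this journal (the kubelet.log filename is hypothetical):

    import re
    from collections import Counter

    # Sketch: count how often each unrecognized feature gate is warned about.
    # "kubelet.log" is a hypothetical local copy of this journal output.
    pattern = re.compile(r"unrecognized feature gate: (\S+)")
    counts = Counter()
    with open("kubelet.log", encoding="utf-8") as fh:
        for line in fh:
            m = pattern.search(line)
            if m:
                counts[m.group(1)] += 1

    # Equal counts across all gates indicate repeated sweeps over one flag
    # value rather than distinct configuration problems.
    for gate, n in counts.most_common():
        print(f"{n:3d}  {gate}")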
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755878 4592 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755885 4592 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755891 4592 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755896 4592 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.755901 4592 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756817 4592 flags.go:64] FLAG: --address="0.0.0.0"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756835 4592 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756853 4592 flags.go:64] FLAG: --anonymous-auth="true"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756860 4592 flags.go:64] FLAG: --application-metrics-count-limit="100"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756868 4592 flags.go:64] FLAG: --authentication-token-webhook="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756874 4592 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756882 4592 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756890 4592 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756895 4592 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756902 4592 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756908 4592 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756916 4592 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756922 4592 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756928 4592 flags.go:64] FLAG: --cgroup-root=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756933 4592 flags.go:64] FLAG: --cgroups-per-qos="true"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756939 4592 flags.go:64] FLAG: --client-ca-file=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756944 4592 flags.go:64] FLAG: --cloud-config=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756950 4592 flags.go:64] FLAG: --cloud-provider=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756955 4592 flags.go:64] FLAG: --cluster-dns="[]"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756962 4592 flags.go:64] FLAG: --cluster-domain=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756967 4592 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756973 4592 flags.go:64] FLAG: --config-dir=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756980 4592 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756986 4592 flags.go:64] FLAG: --container-log-max-files="5"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756993 4592 flags.go:64] FLAG: --container-log-max-size="10Mi"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.756999 4592 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757005 4592 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757011 4592 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757016 4592 flags.go:64] FLAG: --contention-profiling="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757022 4592 flags.go:64] FLAG: --cpu-cfs-quota="true"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757027 4592 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757033 4592 flags.go:64] FLAG: --cpu-manager-policy="none"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757039 4592 flags.go:64] FLAG: --cpu-manager-policy-options=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757046 4592 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757052 4592 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757058 4592 flags.go:64] FLAG: --enable-debugging-handlers="true"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757063 4592 flags.go:64] FLAG: --enable-load-reader="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757069 4592 flags.go:64] FLAG: --enable-server="true"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757075 4592 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757083 4592 flags.go:64] FLAG: --event-burst="100"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757089 4592 flags.go:64] FLAG: --event-qps="50"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757095 4592 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757102 4592 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757108 4592 flags.go:64] FLAG: --eviction-hard=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757115 4592 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757121 4592 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757127 4592 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757133 4592 flags.go:64] FLAG: --eviction-soft=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757139 4592 flags.go:64] FLAG: --eviction-soft-grace-period=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757150 4592 flags.go:64] FLAG: --exit-on-lock-contention="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757182 4592 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757188 4592 flags.go:64] FLAG: --experimental-mounter-path=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757194 4592 flags.go:64] FLAG: --fail-cgroupv1="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757200 4592 flags.go:64] FLAG: --fail-swap-on="true"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757206 4592 flags.go:64] FLAG: --feature-gates=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757214 4592 flags.go:64] FLAG: --file-check-frequency="20s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757220 4592 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757227 4592 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757233 4592 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757239 4592 flags.go:64] FLAG: --healthz-port="10248"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757245 4592 flags.go:64] FLAG: --help="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757250 4592 flags.go:64] FLAG: --hostname-override=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757256 4592 flags.go:64] FLAG: --housekeeping-interval="10s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757261 4592 flags.go:64] FLAG: --http-check-frequency="20s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757267 4592 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757272 4592 flags.go:64] FLAG: --image-credential-provider-config=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757278 4592 flags.go:64] FLAG: --image-gc-high-threshold="85"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757283 4592 flags.go:64] FLAG: --image-gc-low-threshold="80"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757288 4592 flags.go:64] FLAG: --image-service-endpoint=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757294 4592 flags.go:64] FLAG: --kernel-memcg-notification="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757299 4592 flags.go:64] FLAG: --kube-api-burst="100"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757329 4592 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757336 4592 flags.go:64] FLAG: --kube-api-qps="50"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757341 4592 flags.go:64] FLAG: --kube-reserved=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757348 4592 flags.go:64] FLAG: --kube-reserved-cgroup=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757354 4592 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757359 4592 flags.go:64] FLAG: --kubelet-cgroups=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757365 4592 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757371 4592 flags.go:64] FLAG: --lock-file=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757377 4592 flags.go:64] FLAG: --log-cadvisor-usage="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757383 4592 flags.go:64] FLAG: --log-flush-frequency="5s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757389 4592 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757398 4592 flags.go:64] FLAG: --log-json-split-stream="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757405 4592 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757411 4592 flags.go:64] FLAG: --log-text-split-stream="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757416 4592 flags.go:64] FLAG: --logging-format="text"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757422 4592 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757428 4592 flags.go:64] FLAG: --make-iptables-util-chains="true"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757433 4592 flags.go:64] FLAG: --manifest-url=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757439 4592 flags.go:64] FLAG: --manifest-url-header=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757447 4592 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757453 4592 flags.go:64] FLAG: --max-open-files="1000000"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757459 4592 flags.go:64] FLAG: --max-pods="110"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757465 4592 flags.go:64] FLAG: --maximum-dead-containers="-1"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757470 4592 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757476 4592 flags.go:64] FLAG: --memory-manager-policy="None"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757481 4592 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757487 4592 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757493 4592 flags.go:64] FLAG: --node-ip="192.168.126.11"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757499 4592 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757512 4592 flags.go:64] FLAG: --node-status-max-images="50"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757518 4592 flags.go:64] FLAG: --node-status-update-frequency="10s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757523 4592 flags.go:64] FLAG: --oom-score-adj="-999"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757529 4592 flags.go:64] FLAG: --pod-cidr=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757534 4592 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757542 4592 flags.go:64] FLAG: --pod-manifest-path=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757548 4592 flags.go:64] FLAG: --pod-max-pids="-1"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757555 4592 flags.go:64] FLAG: --pods-per-core="0"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757560 4592 flags.go:64] FLAG: --port="10250"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757566 4592 flags.go:64] FLAG: --protect-kernel-defaults="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757572 4592 flags.go:64] FLAG: --provider-id=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757578 4592 flags.go:64] FLAG: --qos-reserved=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757584 4592 flags.go:64] FLAG: --read-only-port="10255"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757589 4592 flags.go:64] FLAG: --register-node="true"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757596 4592 flags.go:64] FLAG: --register-schedulable="true"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757602 4592 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757612 4592 flags.go:64] FLAG: --registry-burst="10"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757617 4592 flags.go:64] FLAG: --registry-qps="5"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757623 4592 flags.go:64] FLAG: --reserved-cpus=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757628 4592 flags.go:64] FLAG: --reserved-memory=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757635 4592 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757641 4592 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757647 4592 flags.go:64] FLAG: --rotate-certificates="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757652 4592 flags.go:64] FLAG: --rotate-server-certificates="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757658 4592 flags.go:64] FLAG: --runonce="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757663 4592 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757669 4592 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757675 4592 flags.go:64] FLAG: --seccomp-default="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757680 4592 flags.go:64] FLAG: --serialize-image-pulls="true"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757685 4592 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757691 4592 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757697 4592 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757703 4592 flags.go:64] FLAG: --storage-driver-password="root"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757708 4592 flags.go:64] FLAG: --storage-driver-secure="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757713 4592 flags.go:64] FLAG: --storage-driver-table="stats"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757719 4592 flags.go:64] FLAG: --storage-driver-user="root"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757729 4592 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757735 4592 flags.go:64] FLAG: --sync-frequency="1m0s"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757741 4592 flags.go:64] FLAG: --system-cgroups=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757748 4592 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757758 4592 flags.go:64] FLAG: --system-reserved-cgroup=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757763 4592 flags.go:64] FLAG: --tls-cert-file=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757769 4592 flags.go:64] FLAG: --tls-cipher-suites="[]"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757782 4592 flags.go:64] FLAG: --tls-min-version=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757788 4592 flags.go:64] FLAG: --tls-private-key-file=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757793 4592 flags.go:64] FLAG: --topology-manager-policy="none"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757799 4592 flags.go:64] FLAG: --topology-manager-policy-options=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757805 4592 flags.go:64] FLAG: --topology-manager-scope="container"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757811 4592 flags.go:64] FLAG: --v="2"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757818 4592 flags.go:64] FLAG: --version="false"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757827 4592 flags.go:64] FLAG: --vmodule=""
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757834 4592 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.757840 4592 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.757978 4592 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.757986 4592 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.757992 4592 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.757997 4592 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758002 4592 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758007 4592 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758011 4592 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758016 4592 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758021 4592 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758026 4592 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758031 4592 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758035 4592 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758040 4592 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758045 4592 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758050 4592 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758057 4592 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758061 4592 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758066 4592 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758071 4592 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758076 4592 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758081 4592 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758086 4592 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758091 4592 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758095 4592 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758100 4592 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758106 4592 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758111 4592 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758116 4592 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758121 4592 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758127 4592 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758132 4592 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758138 4592 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758143 4592 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758171 4592 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758177 4592 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758182 4592 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758188 4592 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758193 4592 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758197 4592 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758202 4592 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758207 4592 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758215 4592 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
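The flags.go:64 block above dumps every command-line flag with its effective value, one entry per flag. To diff the effective flag set between two boots, the dump can be parsed into a dictionary; a sketch, again against a hypothetical saved kubelet.log:

    import re

    # Sketch: parse flags.go:64 entries of the form FLAG: --name="value"
    # into a dict. "kubelet.log" is a hypothetical local copy of this journal.
    FLAG_RE = re.compile(r'FLAG: (--[\w-]+)="(.*?)"(?= |$)')

    flags = {}
    with open("kubelet.log", encoding="utf-8") as fh:
        for line in fh:
            for name, value in FLAG_RE.findall(line):
                flags[name] = value

    # e.g. flags["--config"] == "/etc/kubernetes/kubelet.conf"
    #      flags["--container-runtime-endpoint"] == "/var/run/crio/crio.sock"
    print(len(flags), "flags parsed")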
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758221 4592 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758227 4592 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758232 4592 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758237 4592 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758242 4592 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758247 4592 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758252 4592 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758258 4592 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758263 4592 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758268 4592 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758273 4592 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758278 4592 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758283 4592 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758288 4592 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758293 4592 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758298 4592 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758303 4592 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758308 4592 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758313 4592 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758317 4592 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758322 4592 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758327 4592 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758332 4592 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758338 4592 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758345 4592 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758350 4592 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758357 4592 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758364 4592 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.758370 4592 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.758386 4592 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.766616 4592 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.766656 4592 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766728 4592 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766735 4592 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766740 4592 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766744 4592 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766748 4592 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766752 4592 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766758 4592 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766762 4592 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766766 4592 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766770 4592 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766773 4592 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766777 4592 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766780 4592 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766784 4592 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766787 4592 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766790 4592 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766794 4592 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766797 4592 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766801 4592 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766804 4592 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766808 4592 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766813 4592 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766818 4592 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766822 4592 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766828 4592 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766833 4592 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766837 4592 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766841 4592 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766845 4592 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766849 4592 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766853 4592 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766856 4592 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766860 4592 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766865 4592 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766869 4592 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766873 4592 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766877 4592 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766880 4592 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766883 4592 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766887 4592 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766892 4592 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766896 4592 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766901 4592 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766906 4592 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766929 4592 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766934 4592 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766938 4592 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766941 4592 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766945 4592 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766948 4592 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766952 4592 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766955 4592 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766959 4592 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766963 4592 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766966 4592 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766970 4592 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766973 4592 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766977 4592 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766981 4592 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766985 4592 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766989 4592 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766992 4592 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766996 4592 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.766999 4592 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767003 4592 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767007 4592 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767010 4592 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767014 4592 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767017 4592 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767021 4592 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767024 4592 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.767032 4592 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767150 4592 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767171 4592 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767176 4592 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767180 4592 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767184 4592 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767187 4592 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767191 4592 feature_gate.go:330] unrecognized feature gate: NewOLM
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767195 4592 feature_gate.go:330] unrecognized feature gate: SignatureStores
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767199 4592 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767204 4592 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767209 4592 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767215 4592 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767220 4592 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767224 4592 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767229 4592 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767234 4592 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767238 4592 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767242 4592 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767246 4592 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767252 4592 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767257 4592 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767262 4592 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767268 4592 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
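The feature_gate.go:386 lines print the final resolved gate set as a Go map literal, and the same map appears verbatim on each pass, which confirms the repeated sweeps all resolve to one configuration. A sketch of turning that literal into a Python dict for cross-restart comparison:

    import re

    # Sketch: parse a feature_gate.go:386 line like
    #   feature gates: {map[CloudDualStackNodeIPs:true KMSv1:true ...]}
    # into a {name: bool} dict.
    def parse_feature_gates(line: str) -> dict[str, bool]:
        m = re.search(r"feature gates: \{map\[(.*?)\]\}", line)
        if not m:
            return {}
        pairs = (item.split(":") for item in m.group(1).split())
        return {name: value == "true" for name, value in pairs}

    sample = "feature gates: {map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false]}"
    assert parse_feature_gates(sample) == {
        "CloudDualStackNodeIPs": True,
        "KMSv1": True,
        "NodeSwap": False,
    }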
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767273 4592 feature_gate.go:330] unrecognized feature gate: PinnedImages
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767278 4592 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767299 4592 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767304 4592 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767309 4592 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767314 4592 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767318 4592 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767324 4592 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767328 4592 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767333 4592 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767338 4592 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767345 4592 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767350 4592 feature_gate.go:330] unrecognized feature gate: Example
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767357 4592 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767362 4592 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767368 4592 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767372 4592 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767375 4592 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767379 4592 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767383 4592 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767387 4592 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767391 4592 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767395 4592 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767398 4592 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767402 4592 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767406 4592 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767409 4592 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767414 4592 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767418 4592 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767422 4592 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767427 4592 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767431 4592 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767435 4592 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767440 4592 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767444 4592 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767450 4592 feature_gate.go:330] unrecognized feature gate: OVNObservability
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767455 4592 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767461 4592 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767465 4592 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767470 4592 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767474 4592 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767479 4592 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767483 4592 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767486 4592 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767491 4592 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767495 4592 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767499 4592 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Sep 29 16:51:10 crc kubenswrapper[4592]: W0929 16:51:10.767503 4592 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.767510 4592 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.768271 4592 server.go:940] "Client rotation is on, will bootstrap in background"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.772762 4592 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.772880 4592 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.774344 4592 server.go:997] "Starting client certificate rotation"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.774375 4592 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.774527 4592 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-02 05:50:26.352679421 +0000 UTC
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.774596 4592 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 2268h59m15.578086528s for next certificate rotation
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.825372 4592 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.828449 4592 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.875988 4592 log.go:25] "Validated CRI v1 runtime API"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.963230 4592 log.go:25] "Validated CRI v1 image API"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.964947 4592 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.978904 4592 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-09-29-16-45-27-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.978961 4592 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:46 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}]
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.988534 4592 manager.go:217] Machine: {Timestamp:2025-09-29 16:51:10.987186171 +0000 UTC m=+1.134963872 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2799998 MemoryCapacity:25199476736 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:a7270dda-1e73-4054-97c0-7b6ca81df4ee BootID:a4cccf40-d865-472b-8b1e-2c9ff60e2cb2 Filesystems:[{Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:46 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599738368 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:eb:ba:21 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:eb:ba:21 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:71:fd:e5 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:82:58:da Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:40:2b:cf Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:7e:79:92 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:ee:f8:9b:b5:e9:97 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:6e:40:8d:3b:bf:98 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199476736 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.988715 4592 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.988837 4592 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.989113 4592 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.989317 4592 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.989353 4592 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.989555 4592 topology_manager.go:138] "Creating topology manager with none policy"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.989564 4592 container_manager_linux.go:303] "Creating device plugin manager"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.990017 4592 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.990043 4592 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.990814 4592 state_mem.go:36] "Initialized new in-memory state store"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.991220 4592 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.996613 4592 kubelet.go:418] "Attempting to sync node with API server"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.996645 4592 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.996663 4592 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.996682 4592 kubelet.go:324] "Adding apiserver pod source"
Sep 29 16:51:10 crc kubenswrapper[4592]: I0929 16:51:10.996693 4592 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.002626 4592 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.003348 4592 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Sep 29 16:51:11 crc kubenswrapper[4592]: W0929 16:51:11.011782 4592 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Sep 29 16:51:11 crc kubenswrapper[4592]: W0929 16:51:11.011788 4592 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.011866 4592 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError"
Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.011866 4592 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.056053 4592 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057654 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057680 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057689 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057697 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057709 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057718 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057725 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057747 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057757 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057766 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057785 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057793 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.057816 4592 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.058301 4592 server.go:1280] "Started kubelet"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.059563 4592 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.059312 4592 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.061188 4592 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Sep 29 16:51:11 crc systemd[1]: Started Kubernetes Kubelet.
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.062777 4592 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.065129 4592 server.go:460] "Adding debug handlers to kubelet server"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.065722 4592 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.065834 4592 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.066296 4592 volume_manager.go:287] "The desired_state_of_world populator starts"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.066312 4592 volume_manager.go:289] "Starting Kubelet Volume Manager"
Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.066501 4592 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.065964 4592 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 03:53:47.86328445 +0000 UTC
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.066718 4592 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1379h2m36.796572982s for next certificate rotation
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.066801 4592 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.068628 4592 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="200ms"
Sep 29 16:51:11 crc kubenswrapper[4592]: W0929 16:51:11.068777 4592 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.068855 4592 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.069087 4592 factory.go:55] Registering systemd factory
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.069112 4592 factory.go:221] Registration of the systemd container factory successfully
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.074136 4592 factory.go:153] Registering CRI-O factory
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.074189 4592 factory.go:221] Registration of the crio container factory successfully
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.074288 4592 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.074315 4592 factory.go:103] Registering Raw factory
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.074342 4592 manager.go:1196] Started watching for new ooms in manager
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.075524 4592 manager.go:319] Starting recovery of all containers
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107329 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107378 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107395 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107407 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107433 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107446 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107458 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107472 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107487 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107499 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107513 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107525 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.107539 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.106114 4592 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.110:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.1869cef2a7d85b1f default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-09-29 16:51:11.058271007 +0000 UTC m=+1.206048688,LastTimestamp:2025-09-29 16:51:11.058271007 +0000 UTC m=+1.206048688,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111098 4592 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111136 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111166 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111181 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111192 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111202 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111215 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111237 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111258 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111272 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111287 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111299 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111310 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111320 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111336 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111352 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111367 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111414 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111425 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111435 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111463 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111476 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111489 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111501 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111513 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111526 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111542 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111552 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111565 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111577 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111589 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111601 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111613 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111624 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111635 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111645 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111656 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111669 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111680 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111691 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111706 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111717 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111731 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111744 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111754 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111765 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111775 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111785 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111795 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111805 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111816 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111825 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111835 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111845 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111855 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111864 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111874 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111884 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111894 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111904 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111914 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111923 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111933 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111943 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111953 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111962 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111972 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111981 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111990 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.111999 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112008 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112017 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112026 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112035 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112047 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112058 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112071 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112082 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112096 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112109 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112121 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112135 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112161 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112175 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112195 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112204 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112215 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112224 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112234 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112243 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112253 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112288 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112307 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112326 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112346 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112359 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112372 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112384 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112395 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112407 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112419 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112430 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112439 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112447 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112456 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112488 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112513 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112523 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112571 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112581 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112593 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112605 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112617 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112628 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112639 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112653 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112663 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112673 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112682 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112693 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112713 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112729 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112742 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112754 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112765 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112777 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112786 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112795 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112805 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112814 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112825 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112836 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112849 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b"
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112863 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112875 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112885 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112895 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112904 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112915 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112925 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112937 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112951 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112962 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112975 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112986 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.112995 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113005 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113016 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113027 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113040 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113051 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113063 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113075 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113088 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113099 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113111 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113124 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113137 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113167 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113179 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113192 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113205 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113217 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113227 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113239 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113252 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113264 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113278 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113290 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113303 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113316 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113329 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113342 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113355 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113368 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113380 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113392 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113405 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113418 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113432 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113445 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113651 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113665 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113678 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113689 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113698 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113709 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113718 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113728 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113740 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113753 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113767 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113780 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113789 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113799 4592 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113807 4592 reconstruct.go:97] "Volume reconstruction finished" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.113814 4592 reconciler.go:26] "Reconciler: start to sync state" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.119465 4592 manager.go:324] Recovery completed Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.127866 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.129220 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.129264 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.129276 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.130480 4592 cpu_manager.go:225] "Starting CPU manager" policy="none" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.130579 4592 
cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.130671 4592 state_mem.go:36] "Initialized new in-memory state store" Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.167301 4592 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.179961 4592 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.181678 4592 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.181745 4592 status_manager.go:217] "Starting to sync pod status with apiserver" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.181775 4592 kubelet.go:2335] "Starting kubelet main sync loop" Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.181831 4592 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Sep 29 16:51:11 crc kubenswrapper[4592]: W0929 16:51:11.182478 4592 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.182558 4592 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.268417 4592 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.270183 4592 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="400ms" Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.282342 4592 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.305467 4592 policy_none.go:49] "None policy: Start" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.306969 4592 memory_manager.go:170] "Starting memorymanager" policy="None" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.307003 4592 state_mem.go:35] "Initializing new in-memory state store" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.354817 4592 manager.go:334] "Starting Device Plugin manager" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.354902 4592 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.354917 4592 server.go:79] "Starting device plugin registration server" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.355673 4592 eviction_manager.go:189] "Eviction manager: starting control loop" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.355707 4592 
container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.356451 4592 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.356535 4592 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.356544 4592 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.363123 4592 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.457902 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.459462 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.459503 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.459514 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.459597 4592 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.460499 4592 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.482965 4592 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.483091 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.484429 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.484564 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.484672 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.484928 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.485916 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.486051 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.487252 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.487723 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.488001 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.488243 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.488499 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.488607 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.489208 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.489343 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.489933 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.491767 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.491793 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.491804 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.491976 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.492277 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.492467 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.492502 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.492617 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.492717 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.493636 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.493661 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.493673 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.493772 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.494129 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.494181 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.494527 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.494551 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.494562 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.495003 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.495026 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.495037 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.495204 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.495225 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.495239 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.495365 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.495394 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.496834 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.496864 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.496875 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.621795 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.621888 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.621925 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.621953 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.621990 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.622051 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.622171 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.622213 4592 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.622240 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.622265 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.622288 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.622310 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.622334 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.622358 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.622378 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.661265 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.662526 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.662572 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 
16:51:11.662585 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.662616 4592 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.663063 4592 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Sep 29 16:51:11 crc kubenswrapper[4592]: E0929 16:51:11.670979 4592 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="800ms" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723504 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723573 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723604 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723637 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723666 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723707 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723736 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723764 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" 
(UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723795 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723822 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723847 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723857 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723948 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723876 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723943 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.723982 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724024 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 
16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724069 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724040 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724130 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724207 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724243 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724137 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724299 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724273 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724321 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724314 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: 
I0929 16:51:11.724351 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724298 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.724466 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.814106 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.821341 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.849441 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: W0929 16:51:11.866815 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-30f04c4dff6e69cbda4805655d2809eeb24619b6c80457245e93dabc5da38e4e WatchSource:0}: Error finding container 30f04c4dff6e69cbda4805655d2809eeb24619b6c80457245e93dabc5da38e4e: Status 404 returned error can't find the container with id 30f04c4dff6e69cbda4805655d2809eeb24619b6c80457245e93dabc5da38e4e Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.869457 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: W0929 16:51:11.871362 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-14a22e043dbd08390c9b640a104ca640a5ea718fe0006f9fe5954856bcec3252 WatchSource:0}: Error finding container 14a22e043dbd08390c9b640a104ca640a5ea718fe0006f9fe5954856bcec3252: Status 404 returned error can't find the container with id 14a22e043dbd08390c9b640a104ca640a5ea718fe0006f9fe5954856bcec3252 Sep 29 16:51:11 crc kubenswrapper[4592]: I0929 16:51:11.874734 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 16:51:11 crc kubenswrapper[4592]: W0929 16:51:11.876048 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-276c5689c3c33db35eb4d0daa28b47c843d8f58bb02679e644f34faf277576f3 WatchSource:0}: Error finding container 276c5689c3c33db35eb4d0daa28b47c843d8f58bb02679e644f34faf277576f3: Status 404 returned error can't find the container with id 276c5689c3c33db35eb4d0daa28b47c843d8f58bb02679e644f34faf277576f3 Sep 29 16:51:11 crc kubenswrapper[4592]: W0929 16:51:11.888613 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-7d14a07fb9a303ceefaaa399acd843bade4c397e62ae454cbf38946cc97be443 WatchSource:0}: Error finding container 7d14a07fb9a303ceefaaa399acd843bade4c397e62ae454cbf38946cc97be443: Status 404 returned error can't find the container with id 7d14a07fb9a303ceefaaa399acd843bade4c397e62ae454cbf38946cc97be443 Sep 29 16:51:11 crc kubenswrapper[4592]: W0929 16:51:11.889882 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-137207e63aded48354cc5fe661ed3492cea89f75a92188a3a31fdd6cfd6bcc00 WatchSource:0}: Error finding container 137207e63aded48354cc5fe661ed3492cea89f75a92188a3a31fdd6cfd6bcc00: Status 404 returned error can't find the container with id 137207e63aded48354cc5fe661ed3492cea89f75a92188a3a31fdd6cfd6bcc00 Sep 29 16:51:12 crc kubenswrapper[4592]: W0929 16:51:12.061576 4592 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:12 crc kubenswrapper[4592]: E0929 16:51:12.061696 4592 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.061738 4592 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.063973 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.064982 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.065012 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.065021 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.065042 4592 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 16:51:12 
crc kubenswrapper[4592]: E0929 16:51:12.065354 4592 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.185734 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"276c5689c3c33db35eb4d0daa28b47c843d8f58bb02679e644f34faf277576f3"} Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.186666 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"30f04c4dff6e69cbda4805655d2809eeb24619b6c80457245e93dabc5da38e4e"} Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.187432 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"14a22e043dbd08390c9b640a104ca640a5ea718fe0006f9fe5954856bcec3252"} Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.189008 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"137207e63aded48354cc5fe661ed3492cea89f75a92188a3a31fdd6cfd6bcc00"} Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.189905 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7d14a07fb9a303ceefaaa399acd843bade4c397e62ae454cbf38946cc97be443"} Sep 29 16:51:12 crc kubenswrapper[4592]: W0929 16:51:12.227719 4592 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:12 crc kubenswrapper[4592]: E0929 16:51:12.227819 4592 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Sep 29 16:51:12 crc kubenswrapper[4592]: W0929 16:51:12.400750 4592 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:12 crc kubenswrapper[4592]: E0929 16:51:12.400830 4592 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Sep 29 16:51:12 crc kubenswrapper[4592]: E0929 16:51:12.472397 4592 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="1.6s" Sep 29 16:51:12 crc kubenswrapper[4592]: W0929 16:51:12.520231 4592 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:12 crc kubenswrapper[4592]: E0929 16:51:12.520310 4592 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.865589 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.867452 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.867478 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.867517 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:12 crc kubenswrapper[4592]: I0929 16:51:12.867539 4592 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 16:51:12 crc kubenswrapper[4592]: E0929 16:51:12.867907 4592 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.062651 4592 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.194338 4592 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2d788e7ecffd4fec7a649dea7ffc449892e9211e74ca2205abfccf7e317125a0" exitCode=0 Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.194396 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2d788e7ecffd4fec7a649dea7ffc449892e9211e74ca2205abfccf7e317125a0"} Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.194602 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.195559 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.195711 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.195729 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:13 crc 
kubenswrapper[4592]: I0929 16:51:13.196489 4592 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b" exitCode=0 Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.196536 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b"} Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.196644 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.197382 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.197400 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.197409 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.198400 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.199266 4592 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="2d1091e1593164ad15c8a399e57e348e317afe114a957b81c9611fef33f54fe4" exitCode=0 Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.199328 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"2d1091e1593164ad15c8a399e57e348e317afe114a957b81c9611fef33f54fe4"} Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.199400 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.199421 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.199432 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.199802 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.201445 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.201473 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.201485 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.201724 4592 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d" exitCode=0 Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.201749 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d"} Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.201801 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.202568 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.202583 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.202591 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.204501 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15"} Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.204531 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3"} Sep 29 16:51:13 crc kubenswrapper[4592]: I0929 16:51:13.204546 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a"} Sep 29 16:51:13 crc kubenswrapper[4592]: W0929 16:51:13.982324 4592 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:13 crc kubenswrapper[4592]: E0929 16:51:13.982402 4592 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.062503 4592 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:14 crc kubenswrapper[4592]: E0929 16:51:14.073052 4592 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="3.2s" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.208263 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545"} Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.208306 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065"} Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.208317 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.208321 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2"} Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.209027 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.209053 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.209064 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.210616 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf"} Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.210632 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.211225 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.211243 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.211251 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.212455 4592 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d56413fa0f124c9c8c73c54bb11be950d4cf0ea2d7aa147a1d44e3ca8fd9d826" exitCode=0 Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.212510 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d56413fa0f124c9c8c73c54bb11be950d4cf0ea2d7aa147a1d44e3ca8fd9d826"} Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.212604 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.213433 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.213454 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 
29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.213463 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.214994 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e"} Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.215018 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42"} Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.215031 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63"} Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.215041 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1"} Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.222828 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"6bbde651cf91895d2be9af42857fee9880a7b7a9ccc270f86e07499848b23426"} Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.222984 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.223985 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.224012 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.224021 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:14 crc kubenswrapper[4592]: W0929 16:51:14.297049 4592 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:14 crc kubenswrapper[4592]: E0929 16:51:14.297122 4592 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.468510 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.469874 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:14 crc 
kubenswrapper[4592]: I0929 16:51:14.469900 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.469908 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:14 crc kubenswrapper[4592]: I0929 16:51:14.469928 4592 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 16:51:14 crc kubenswrapper[4592]: E0929 16:51:14.470348 4592 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Sep 29 16:51:14 crc kubenswrapper[4592]: W0929 16:51:14.676292 4592 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:14 crc kubenswrapper[4592]: E0929 16:51:14.676362 4592 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Sep 29 16:51:14 crc kubenswrapper[4592]: W0929 16:51:14.986195 4592 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:14 crc kubenswrapper[4592]: E0929 16:51:14.986266 4592 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.061883 4592 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.227644 4592 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6f5e57a99b51370c3625acbaac2f95c9d5aca8f3dd5a28c757416203f51acd76" exitCode=0 Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.227696 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6f5e57a99b51370c3625acbaac2f95c9d5aca8f3dd5a28c757416203f51acd76"} Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.227794 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.228663 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.228686 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 
16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.228694 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.233313 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.233340 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"94d495482b9bf2addc236520a0755b27d42ceb37e099c2b0fed5d39f1fe95f75"} Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.233364 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.233453 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.233453 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.233456 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.234783 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.234798 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.234794 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.234804 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.234818 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.234838 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.234817 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.234879 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.234848 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.234916 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.234837 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.234982 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:15 crc kubenswrapper[4592]: I0929 16:51:15.950960 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:16 crc 
kubenswrapper[4592]: I0929 16:51:16.244420 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c626916e89c60a1089ed6f1ab41167d76690cf60c244950a33161005c2c1045c"} Sep 29 16:51:16 crc kubenswrapper[4592]: I0929 16:51:16.244461 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"034a2b9e94fcce9479a231a578e91f3a8e4cafec8e7b8ece2fa1aabc7063bef9"} Sep 29 16:51:16 crc kubenswrapper[4592]: I0929 16:51:16.245880 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 16:51:16 crc kubenswrapper[4592]: I0929 16:51:16.247303 4592 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="94d495482b9bf2addc236520a0755b27d42ceb37e099c2b0fed5d39f1fe95f75" exitCode=255 Sep 29 16:51:16 crc kubenswrapper[4592]: I0929 16:51:16.247338 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"94d495482b9bf2addc236520a0755b27d42ceb37e099c2b0fed5d39f1fe95f75"} Sep 29 16:51:16 crc kubenswrapper[4592]: I0929 16:51:16.247457 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:16 crc kubenswrapper[4592]: I0929 16:51:16.250215 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:16 crc kubenswrapper[4592]: I0929 16:51:16.250244 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:16 crc kubenswrapper[4592]: I0929 16:51:16.250254 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:16 crc kubenswrapper[4592]: I0929 16:51:16.250788 4592 scope.go:117] "RemoveContainer" containerID="94d495482b9bf2addc236520a0755b27d42ceb37e099c2b0fed5d39f1fe95f75" Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.131942 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.253313 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d022815100067a0f62a9ad230cdd4e9aef59946fe0cebc661d0a13884eaadde5"} Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.253360 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"910220e33f4ea4e52d24e419e847fd0a008ef93b42759b876b599a9ec523e012"} Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.255255 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.257268 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9"} Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.257332 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.257375 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.258275 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.258378 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.258455 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.671958 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.673426 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.673584 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.673674 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:17 crc kubenswrapper[4592]: I0929 16:51:17.673791 4592 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.264957 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bbb1cb45cf9d23f15147e3aad9fe4889a2bb40532ece918719f3031b6097e66b"} Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.265005 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.265047 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.265141 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.266585 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.266626 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.266638 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.267473 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.267530 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.267547 4592 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.500226 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.500401 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.501478 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.501520 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:18 crc kubenswrapper[4592]: I0929 16:51:18.501534 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:19 crc kubenswrapper[4592]: I0929 16:51:19.267814 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:19 crc kubenswrapper[4592]: I0929 16:51:19.269666 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:19 crc kubenswrapper[4592]: I0929 16:51:19.269726 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:19 crc kubenswrapper[4592]: I0929 16:51:19.269747 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:19 crc kubenswrapper[4592]: I0929 16:51:19.635026 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:19 crc kubenswrapper[4592]: I0929 16:51:19.635294 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:19 crc kubenswrapper[4592]: I0929 16:51:19.636447 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:19 crc kubenswrapper[4592]: I0929 16:51:19.636485 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:19 crc kubenswrapper[4592]: I0929 16:51:19.636496 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:19 crc kubenswrapper[4592]: I0929 16:51:19.810359 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.270119 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.271190 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.271325 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.271407 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.744202 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:20 
crc kubenswrapper[4592]: I0929 16:51:20.744403 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.745632 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.745681 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.745693 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.756327 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.756585 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.757859 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.757911 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:20 crc kubenswrapper[4592]: I0929 16:51:20.757934 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:21 crc kubenswrapper[4592]: E0929 16:51:21.363395 4592 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Sep 29 16:51:21 crc kubenswrapper[4592]: I0929 16:51:21.567879 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Sep 29 16:51:21 crc kubenswrapper[4592]: I0929 16:51:21.568078 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:21 crc kubenswrapper[4592]: I0929 16:51:21.569757 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:21 crc kubenswrapper[4592]: I0929 16:51:21.569802 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:21 crc kubenswrapper[4592]: I0929 16:51:21.569816 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:22 crc kubenswrapper[4592]: I0929 16:51:22.080474 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:22 crc kubenswrapper[4592]: I0929 16:51:22.080754 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:22 crc kubenswrapper[4592]: I0929 16:51:22.082181 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:22 crc kubenswrapper[4592]: I0929 16:51:22.082217 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:22 crc kubenswrapper[4592]: I0929 16:51:22.082251 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:22 crc kubenswrapper[4592]: I0929 16:51:22.540646 4592 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:22 crc kubenswrapper[4592]: I0929 16:51:22.540841 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:22 crc kubenswrapper[4592]: I0929 16:51:22.541982 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:22 crc kubenswrapper[4592]: I0929 16:51:22.542017 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:22 crc kubenswrapper[4592]: I0929 16:51:22.542027 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:22 crc kubenswrapper[4592]: I0929 16:51:22.545335 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:23 crc kubenswrapper[4592]: I0929 16:51:23.276476 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:23 crc kubenswrapper[4592]: I0929 16:51:23.277935 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:23 crc kubenswrapper[4592]: I0929 16:51:23.277967 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:23 crc kubenswrapper[4592]: I0929 16:51:23.277977 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:23 crc kubenswrapper[4592]: I0929 16:51:23.281736 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:24 crc kubenswrapper[4592]: I0929 16:51:24.280460 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:24 crc kubenswrapper[4592]: I0929 16:51:24.281617 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:24 crc kubenswrapper[4592]: I0929 16:51:24.281734 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:24 crc kubenswrapper[4592]: I0929 16:51:24.281837 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:25 crc kubenswrapper[4592]: I0929 16:51:25.080684 4592 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Sep 29 16:51:25 crc kubenswrapper[4592]: I0929 16:51:25.080770 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Sep 29 16:51:26 crc kubenswrapper[4592]: I0929 16:51:26.063272 4592 csi_plugin.go:884] Failed to contact API server when waiting 
for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Sep 29 16:51:26 crc kubenswrapper[4592]: I0929 16:51:26.711061 4592 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Sep 29 16:51:26 crc kubenswrapper[4592]: I0929 16:51:26.711345 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Sep 29 16:51:26 crc kubenswrapper[4592]: I0929 16:51:26.723061 4592 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Sep 29 16:51:26 crc kubenswrapper[4592]: I0929 16:51:26.723195 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Sep 29 16:51:27 crc kubenswrapper[4592]: I0929 16:51:27.138479 4592 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]log ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]etcd ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/openshift.io-api-request-count-filter ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/openshift.io-startkubeinformers ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/start-apiserver-admission-initializer ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/generic-apiserver-start-informers ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/priority-and-fairness-config-consumer ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/priority-and-fairness-filter ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/storage-object-count-tracker-hook ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/start-apiextensions-informers ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/start-apiextensions-controllers ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/crd-informer-synced ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/start-system-namespaces-controller ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/start-cluster-authentication-info-controller ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/start-legacy-token-tracking-controller ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/start-service-ip-repair-controllers ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld
Sep 29 16:51:27 crc kubenswrapper[4592]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/priority-and-fairness-config-producer ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/bootstrap-controller ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/aggregator-reload-proxy-client-cert ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/start-kube-aggregator-informers ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/apiservice-status-local-available-controller ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/apiservice-status-remote-available-controller ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/apiservice-registration-controller ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/apiservice-wait-for-first-sync ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/apiservice-discovery-controller ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/kube-apiserver-autoregistration ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]autoregister-completion ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/apiservice-openapi-controller ok
Sep 29 16:51:27 crc kubenswrapper[4592]: [+]poststarthook/apiservice-openapiv3-controller ok
Sep 29 16:51:27 crc kubenswrapper[4592]: livez check failed
Sep 29 16:51:27 crc kubenswrapper[4592]: I0929 16:51:27.141327 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 16:51:30 crc kubenswrapper[4592]: I0929 16:51:30.657728 4592 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Sep 29 16:51:30 crc kubenswrapper[4592]: I0929 16:51:30.657795 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Sep 29 16:51:30 crc kubenswrapper[4592]: I0929 16:51:30.745496 4592 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Sep 29 16:51:30 crc kubenswrapper[4592]: I0929 16:51:30.745557 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Sep 29 16:51:30 crc kubenswrapper[4592]: I0929 16:51:30.788488 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Sep 29 16:51:30 crc kubenswrapper[4592]: I0929 16:51:30.788656 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 16:51:30 crc kubenswrapper[4592]: I0929 16:51:30.789727 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:30 crc kubenswrapper[4592]: I0929 16:51:30.789778 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:30 crc kubenswrapper[4592]: I0929 16:51:30.789791 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:30 crc kubenswrapper[4592]: I0929 16:51:30.826473 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.297345 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.298118 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.298176 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.298188 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:31 crc kubenswrapper[4592]: E0929 16:51:31.363482 4592 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Sep 29 16:51:31 crc kubenswrapper[4592]: E0929 16:51:31.701404 4592 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.704550 4592 trace.go:236] Trace[993403246]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 16:51:19.669) (total time: 12034ms):
Sep 29 16:51:31 crc kubenswrapper[4592]: Trace[993403246]: ---"Objects listed" error: 12034ms (16:51:31.704)
Sep 29 16:51:31 crc kubenswrapper[4592]: Trace[993403246]: [12.034534376s] [12.034534376s] END
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.704578 4592 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.708369 4592 trace.go:236] Trace[672069614]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 16:51:17.859) (total time: 13848ms):
Sep 29 16:51:31 crc kubenswrapper[4592]: Trace[672069614]: ---"Objects listed" error: 13848ms (16:51:31.708)
Sep 29 16:51:31 crc kubenswrapper[4592]: Trace[672069614]: [13.848560663s] [13.848560663s] END
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.708406 4592 reflector.go:368] Caches populated for *v1.CSIDriver from
k8s.io/client-go/informers/factory.go:160
Sep 29 16:51:31 crc kubenswrapper[4592]: E0929 16:51:31.709132 4592 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.710686 4592 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.710848 4592 trace.go:236] Trace[167318161]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 16:51:18.399) (total time: 13311ms):
Sep 29 16:51:31 crc kubenswrapper[4592]: Trace[167318161]: ---"Objects listed" error: 13311ms (16:51:31.710)
Sep 29 16:51:31 crc kubenswrapper[4592]: Trace[167318161]: [13.311734602s] [13.311734602s] END
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.710870 4592 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.715104 4592 trace.go:236] Trace[2031869018]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (29-Sep-2025 16:51:19.458) (total time: 12256ms):
Sep 29 16:51:31 crc kubenswrapper[4592]: Trace[2031869018]: ---"Objects listed" error: 12256ms (16:51:31.714)
Sep 29 16:51:31 crc kubenswrapper[4592]: Trace[2031869018]: [12.256448411s] [12.256448411s] END
Sep 29 16:51:31 crc kubenswrapper[4592]: I0929 16:51:31.715136 4592 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.012855 4592 apiserver.go:52] "Watching apiserver"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.026784 4592 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.027057 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"]
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.027576 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.027633 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.027678 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.027866 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.028092 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.028117 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.028159 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.028195 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.028254 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.029430 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.029441 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.029584 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.029604 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.029661 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.029717 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.029721 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.030251 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.033856 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.055816 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.066052 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.067452 4592 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.075487 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.086217 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.107071 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113455 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113505 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113532 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113556 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113579 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113604 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113627 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113653 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113675 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113697 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113729 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113754 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113777 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113801 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113826 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113849 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113870 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113894 4592 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113918 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.113978 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114002 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114025 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114047 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114071 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114092 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114115 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114136 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Sep 29 16:51:32 crc kubenswrapper[4592]: 
I0929 16:51:32.114178 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114183 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114200 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114273 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114299 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114322 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114348 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114369 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114390 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114412 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Sep 29 16:51:32 crc 
kubenswrapper[4592]: I0929 16:51:32.114431 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114453 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114472 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114491 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114510 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114532 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114554 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114576 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114606 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114629 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Sep 29 16:51:32 
crc kubenswrapper[4592]: I0929 16:51:32.114653 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114674 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114696 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114719 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114741 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114761 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114782 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114803 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114827 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114848 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 
29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114868 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114890 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114911 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114931 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114951 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114977 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115000 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115024 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115045 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115067 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 
16:51:32.115087 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115108 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115131 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115221 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115250 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115273 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115296 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115319 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115342 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115363 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Sep 29 16:51:32 crc 
kubenswrapper[4592]: I0929 16:51:32.115391 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115412 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115433 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115455 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115477 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115498 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115520 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115541 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115563 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115585 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115607 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115662 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115688 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115717 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115740 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115763 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115785 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115807 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115831 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115852 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115876 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115900 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115924 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115950 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115973 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115996 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116018 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116038 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116059 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116082 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116103 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116125 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116168 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116193 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116215 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116235 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116256 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116279 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116300 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116322 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116344 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116366 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116387 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116411 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116477 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116503 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116530 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116554 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116576 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116597 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116618 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116640 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116663 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116685 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116709 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116733 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116756 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116779 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116804 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116827 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116849 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116879 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116903 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116926 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116949 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116970 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116995 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117020 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117041 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117063 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117086 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117107 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117124 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117156 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117175 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117191 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117208 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117226 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117241 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117258 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117275 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117293 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117310 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117327 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117343 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117429 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117448 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117465 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117480 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117495 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117512 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117528 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117545 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117562 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117578 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117595 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117611 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117628 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117646 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117663 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117680 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117697 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117715 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117741 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117758 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117775 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117797 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117820 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117845 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117870 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117892 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117913 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117936 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117954 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117971 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117995 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118019 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118058 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118086 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118134 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118333 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118363 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118389 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118417 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118446 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118480 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118510 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118538 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118564 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118589 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118611 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118631 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118650 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118703 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.114795 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115188 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115441 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.115675 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.116485 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117603 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117653 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.117867 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118106 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118135 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118425 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118547 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118592 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118686 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118771 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.118949 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.119190 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.119197 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.119208 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.119381 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.119437 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.119547 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.119628 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.119770 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.119819 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.119889 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.120007 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.120016 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.120313 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.120328 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.120395 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.120545 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.120579 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.120636 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.120655 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.120750 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.126709 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.127136 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.127601 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.127783 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.127800 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.127985 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.128000 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.128106 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.128281 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.128603 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.128668 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.128671 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.128810 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.128878 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.128954 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.129083 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.129244 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.129320 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.129371 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.128959 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.129558 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.129726 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.129907 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.130203 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.130331 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.130215 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.130547 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.130609 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.130745 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.130844 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.131055 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.131078 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.131123 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.131727 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.131783 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.131874 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.131949 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.131970 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.132680 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.132803 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.131349 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.131593 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.131641 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.132933 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.133064 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.133575 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.133591 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.133270 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.133405 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.133687 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.133946 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.134051 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.134211 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.134476 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.134575 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.134923 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.134931 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.135118 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.135344 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.135396 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.135501 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.135562 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.135678 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.135773 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.135809 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.135875 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.135959 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.136078 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.136293 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.136299 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.136455 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.136458 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.136476 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.136651 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.136925 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.136985 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.137023 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.137272 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.137301 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.137394 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.137500 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.137534 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.137700 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.137804 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.137968 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.138125 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.138225 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.138395 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.138452 4592 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.138498 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:32.638483484 +0000 UTC m=+22.786261165 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.138669 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.139278 4592 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.141954 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.142182 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.142252 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.142588 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.142630 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.142996 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.143171 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.143340 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.143595 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.143707 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.143636 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.143827 4592 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.143886 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.143894 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:32.643874339 +0000 UTC m=+22.791652080 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.130925 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.130791 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.144220 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.144821 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.144904 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.144980 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:51:32.644967201 +0000 UTC m=+22.792744942 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.146442 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.148995 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.149180 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.149567 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.149974 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.150128 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.150140 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.150552 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.150670 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.145136 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.145289 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.145386 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.145440 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.145465 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.145594 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.145787 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.145821 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.145851 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.146087 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.146114 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). 
InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.146335 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.146453 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.147211 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.148700 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.148735 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.148758 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.151787 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.152060 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.157321 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.157884 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.158131 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.158186 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.158197 4592 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.158249 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:32.658225485 +0000 UTC m=+22.806003166 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.158914 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.159056 4592 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.159179 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.159368 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.165335 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.165373 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.165388 4592 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.165447 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:32.665426893 +0000 UTC m=+22.813204624 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.166564 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.166964 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.166981 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.167533 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.168497 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.168566 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.171123 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.171127 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.171314 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.171334 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.171872 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.177429 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.180547 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.181384 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.181466 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.181728 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.182107 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.182221 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.182465 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.182708 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.182962 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.183322 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.183329 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.183952 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.184934 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.185102 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.185411 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.187319 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.187784 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.195168 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.198196 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.211403 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.213782 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.218752 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219124 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219166 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219223 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219234 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219243 4592 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219251 4592 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219259 4592 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219267 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219275 4592 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219283 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219291 4592 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219299 4592 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219307 4592 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219316 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219324 4592 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219333 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219327 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219341 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219387 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219398 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219414 4592 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219426 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219436 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219444 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219453 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219461 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219469 4592 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219478 4592 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219486 4592 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219494 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219503 4592 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219512 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219520 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219529 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\""
\"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219538 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219546 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219555 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219563 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219570 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219579 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219587 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219595 4592 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219602 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219610 4592 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219618 4592 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219627 4592 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219635 4592 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: 
\"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219642 4592 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219650 4592 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219657 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219665 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219672 4592 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219680 4592 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219688 4592 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219697 4592 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219705 4592 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219713 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219722 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219730 4592 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219738 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Sep 29 
16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219745 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219754 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219762 4592 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219770 4592 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219778 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219786 4592 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219794 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219802 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219810 4592 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219819 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219827 4592 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219838 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219851 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" 
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219859 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219867 4592 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219874 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219882 4592 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219890 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219898 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219906 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219914 4592 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219926 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219935 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219943 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219951 4592 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219959 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219967 4592 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219975 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219983 4592 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219991 4592 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.219999 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220007 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220022 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220030 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220039 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220047 4592 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220054 4592 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220062 4592 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220069 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220077 4592 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220085 4592 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220093 4592 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220100 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220108 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220116 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220124 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220132 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220155 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220163 4592 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220171 4592 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220179 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220187 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220195 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName:
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220203 4592 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220210 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220234 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220242 4592 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220250 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220258 4592 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220266 4592 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220274 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220281 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220289 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220297 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220305 4592 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220313 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: 
\"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220322 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220330 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220337 4592 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220345 4592 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220353 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220360 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220368 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220376 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220384 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220392 4592 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220399 4592 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220408 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220416 4592 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" 
(UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220424 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220432 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220440 4592 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220449 4592 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220459 4592 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220470 4592 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220480 4592 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220490 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220499 4592 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220509 4592 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220520 4592 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220531 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220539 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: 
\"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220547 4592 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220555 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220562 4592 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220571 4592 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220579 4592 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220587 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220596 4592 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220606 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220615 4592 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220622 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220631 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220639 4592 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220646 4592 
reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220654 4592 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220661 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220669 4592 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220677 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220685 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220693 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220700 4592 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220707 4592 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220714 4592 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220721 4592 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220729 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220736 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220744 4592 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" 
(UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220752 4592 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220759 4592 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220766 4592 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220774 4592 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220782 4592 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220790 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220797 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220804 4592 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220812 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220820 4592 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220828 4592 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220836 4592 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220844 4592 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" 
(UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220853 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220860 4592 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220867 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220874 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.220913 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.225473 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.226487 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.229098 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.229955 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.238604 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.247183 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.254564 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.261962 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.270347 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.275594 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.285763 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.301465 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.302022 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.303586 4592 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9" exitCode=255 Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.303628 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9"} Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.303665 4592 scope.go:117] "RemoveContainer" containerID="94d495482b9bf2addc236520a0755b27d42ceb37e099c2b0fed5d39f1fe95f75" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.308114 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.308189 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.318328 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.321528 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.331642 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d495482b9bf2addc236520a0755b27d42ceb37e099c2b0fed5d39f1fe95f75\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:15Z\\\",\\\"message\\\":\\\"W0929 16:51:14.579372 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0929 
16:51:14.579733 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759164674 cert, and key in /tmp/serving-cert-583626793/serving-signer.crt, /tmp/serving-cert-583626793/serving-signer.key\\\\nI0929 16:51:15.060298 1 observer_polling.go:159] Starting file observer\\\\nW0929 16:51:15.062951 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0929 16:51:15.063100 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:15.064324 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-583626793/tls.crt::/tmp/serving-cert-583626793/tls.key\\\\\\\"\\\\nF0929 16:51:15.646458 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' 
detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.340581 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.341629 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.350534 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.350734 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.356718 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Sep 29 16:51:32 crc kubenswrapper[4592]: W0929 16:51:32.360862 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-feec242ae19cb2b0798d946c77cee172e47b270152e96d44f2d4df382d26a8c0 WatchSource:0}: Error finding container feec242ae19cb2b0798d946c77cee172e47b270152e96d44f2d4df382d26a8c0: Status 404 returned error can't find the container with id feec242ae19cb2b0798d946c77cee172e47b270152e96d44f2d4df382d26a8c0 Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.361078 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.372417 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.378571 4592 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.378859 4592 scope.go:117] "RemoveContainer" containerID="5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9" Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.379088 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.384198 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.399438 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.412711 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:32 crc kubenswrapper[4592]: W0929 16:51:32.454853 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-98ce354c42482ef684321a7ea49b6501bea0707e8fed1ad3f11a9f269e4ece20 WatchSource:0}: Error finding container 98ce354c42482ef684321a7ea49b6501bea0707e8fed1ad3f11a9f269e4ece20: Status 404 returned error can't find the container with id 98ce354c42482ef684321a7ea49b6501bea0707e8fed1ad3f11a9f269e4ece20 Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.724358 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.724430 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.724452 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.724476 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:32 crc kubenswrapper[4592]: I0929 16:51:32.724498 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:32 crc 
kubenswrapper[4592]: E0929 16:51:32.724558 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:51:33.724504669 +0000 UTC m=+23.872282350 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.724598 4592 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.724566 4592 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.724601 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.724656 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:33.724642643 +0000 UTC m=+23.872420474 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.724660 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.724677 4592 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.724606 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.724706 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:33.724695175 +0000 UTC m=+23.872472856 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.724717 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.724733 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:33.724727886 +0000 UTC m=+23.872505567 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.724742 4592 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:32 crc kubenswrapper[4592]: E0929 16:51:32.724801 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:33.724790457 +0000 UTC m=+23.872568138 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.185431 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.185902 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.187394 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.188112 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.307732 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.309391 4592 scope.go:117] "RemoveContainer" containerID="5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9" Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.309526 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.322060 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.331992 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.343382 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.352752 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.360633 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.370428 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.379349 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.388354 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.448809 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.449641 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.450470 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.451128 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.451931 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.452579 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.453590 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.454484 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.455114 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.455775 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Sep 29 16:51:33 crc 
kubenswrapper[4592]: I0929 16:51:33.457002 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.486868 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.493226 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.493586 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.494179 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.495136 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.495716 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.496851 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.497311 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.498449 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.498937 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.500063 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.500703 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.501137 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.502128 4592 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.502633 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.503439 4592 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.503538 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.518084 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.518567 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.518942 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.599245 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.600107 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.601392 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.602136 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.603217 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.603731 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.604717 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.605399 4592 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.606424 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.607665 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.608621 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.609222 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.610344 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.610877 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.611763 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.612290 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.613262 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.613799 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.614237 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.615020 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"98ce354c42482ef684321a7ea49b6501bea0707e8fed1ad3f11a9f269e4ece20"} Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.615041 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" 
event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ac52ba30f5ef8a2884548670af372e5f1d18c3a29f14f4efa02cfbb14380311a"} Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.615054 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-k5ts8"] Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.615292 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-gbbtb"] Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.615414 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-dfqzg"] Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.615441 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-k5ts8" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.615610 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.615980 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"feec242ae19cb2b0798d946c77cee172e47b270152e96d44f2d4df382d26a8c0"} Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.615996 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-n7rcv"] Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.616073 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.616705 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-47pt5"] Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.616798 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.617469 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.641356 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.642893 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.642961 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.643003 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.643163 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.643179 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.643268 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.643281 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.643348 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.643362 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.643470 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.643587 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.643665 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.643679 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.644037 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.644206 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.644310 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.644398 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.644566 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Sep 29 
16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.644702 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.647472 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.651297 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.653941 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri
-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.663892 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.672191 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.681886 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.690304 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.698393 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.706242 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.714716 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.721133 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: 
connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.728751 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.732844 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.732952 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-run-netns\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.732984 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-etc-openvswitch\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733008 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733064 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8ggf\" (UniqueName: \"kubernetes.io/projected/4cc986fa-6620-43ff-ae05-11c71e326035-kube-api-access-h8ggf\") pod \"machine-config-daemon-dfqzg\" (UID: \"4cc986fa-6620-43ff-ae05-11c71e326035\") " pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733109 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733157 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/2f9a55e9-2c59-4873-a10c-74f3f529aa72-hosts-file\") pod \"node-resolver-k5ts8\" (UID: \"2f9a55e9-2c59-4873-a10c-74f3f529aa72\") " pod="openshift-dns/node-resolver-k5ts8" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733187 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-var-lib-openvswitch\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733210 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-openvswitch\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.733280 4592 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733365 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-node-log\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733437 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b22efd65-426d-4220-9e18-5a84827be8ac-ovn-node-metrics-cert\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.733519 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:51:35.7334961 +0000 UTC m=+25.881273771 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733556 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95m58\" (UniqueName: \"kubernetes.io/projected/b22efd65-426d-4220-9e18-5a84827be8ac-kube-api-access-95m58\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.733633 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:35.733559752 +0000 UTC m=+25.881337433 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733665 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/58235808-6fc6-4723-84e4-59f2d38319f1-cni-binary-copy\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733690 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/58235808-6fc6-4723-84e4-59f2d38319f1-tuning-conf-dir\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733707 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-systemd-units\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733744 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.733968 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-systemd\") pod 
\"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734001 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-var-lib-cni-multus\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734027 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-etc-kubernetes\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734060 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/58235808-6fc6-4723-84e4-59f2d38319f1-system-cni-dir\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv" Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.734053 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.734138 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.734164 4592 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734086 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-env-overrides\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.734239 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:35.734215061 +0000 UTC m=+25.881992742 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734302 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5bgk\" (UniqueName: \"kubernetes.io/projected/58235808-6fc6-4723-84e4-59f2d38319f1-kube-api-access-n5bgk\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734360 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734423 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-var-lib-kubelet\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.734470 4592 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734471 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-multus-daemon-config\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.734517 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:35.734505729 +0000 UTC m=+25.882283410 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734547 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-system-cni-dir\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734582 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-run-k8s-cni-cncf-io\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734918 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-hostroot\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734938 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mxdv\" (UniqueName: \"kubernetes.io/projected/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-kube-api-access-4mxdv\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734958 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-kubelet\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.734977 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-log-socket\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735030 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/4cc986fa-6620-43ff-ae05-11c71e326035-rootfs\") pod \"machine-config-daemon-dfqzg\" (UID: \"4cc986fa-6620-43ff-ae05-11c71e326035\") " pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735047 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4cc986fa-6620-43ff-ae05-11c71e326035-proxy-tls\") pod \"machine-config-daemon-dfqzg\" (UID: \"4cc986fa-6620-43ff-ae05-11c71e326035\") " pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735068 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-os-release\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735089 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-var-lib-cni-bin\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735112 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735131 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-cni-netd\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735172 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-ovnkube-script-lib\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735194 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/58235808-6fc6-4723-84e4-59f2d38319f1-cnibin\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735211 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/58235808-6fc6-4723-84e4-59f2d38319f1-os-release\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735230 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-multus-cni-dir\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735248 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-multus-socket-dir-parent\") pod \"multus-gbbtb\" (UID:
\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735264 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-multus-conf-dir\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735281 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-run-multus-certs\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735313 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-run-ovn-kubernetes\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735331 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-cni-bin\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735346 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4cc986fa-6620-43ff-ae05-11c71e326035-mcd-auth-proxy-config\") pod \"machine-config-daemon-dfqzg\" (UID: \"4cc986fa-6620-43ff-ae05-11c71e326035\") " pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735363 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-cnibin\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735382 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9wcw\" (UniqueName: \"kubernetes.io/projected/2f9a55e9-2c59-4873-a10c-74f3f529aa72-kube-api-access-k9wcw\") pod \"node-resolver-k5ts8\" (UID: \"2f9a55e9-2c59-4873-a10c-74f3f529aa72\") " pod="openshift-dns/node-resolver-k5ts8" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735401 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-ovnkube-config\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735417 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-cni-binary-copy\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735434 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-run-netns\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735452 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/58235808-6fc6-4723-84e4-59f2d38319f1-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735470 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-slash\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.735491 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-ovn\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.735613 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.735623 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.735632 4592 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:33 crc kubenswrapper[4592]: E0929 16:51:33.735712 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:35.735699634 +0000 UTC m=+25.883477315 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.745259 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.753960 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.761865 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.773877 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.805081 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.823134 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836331 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-cnibin\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836369 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9wcw\" (UniqueName: \"kubernetes.io/projected/2f9a55e9-2c59-4873-a10c-74f3f529aa72-kube-api-access-k9wcw\") pod \"node-resolver-k5ts8\" (UID: \"2f9a55e9-2c59-4873-a10c-74f3f529aa72\") " pod="openshift-dns/node-resolver-k5ts8"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836356 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836492 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-cnibin\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836386 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-ovnkube-config\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836577 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-slash\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836603 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-cni-binary-copy\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836624 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-run-netns\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836646 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/58235808-6fc6-4723-84e4-59f2d38319f1-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836669 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-ovn\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836705 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-run-netns\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836724 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-etc-openvswitch\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836743 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836765 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8ggf\" (UniqueName: \"kubernetes.io/projected/4cc986fa-6620-43ff-ae05-11c71e326035-kube-api-access-h8ggf\") pod \"machine-config-daemon-dfqzg\" (UID: \"4cc986fa-6620-43ff-ae05-11c71e326035\") " pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836784 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-node-log\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836806 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/2f9a55e9-2c59-4873-a10c-74f3f529aa72-hosts-file\") pod \"node-resolver-k5ts8\" (UID: \"2f9a55e9-2c59-4873-a10c-74f3f529aa72\") " pod="openshift-dns/node-resolver-k5ts8"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836825 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-var-lib-openvswitch\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836842 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-openvswitch\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836860 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b22efd65-426d-4220-9e18-5a84827be8ac-ovn-node-metrics-cert\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836880 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95m58\" (UniqueName: \"kubernetes.io/projected/b22efd65-426d-4220-9e18-5a84827be8ac-kube-api-access-95m58\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836905 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-ovnkube-config\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836913 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/58235808-6fc6-4723-84e4-59f2d38319f1-cni-binary-copy\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836942 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/58235808-6fc6-4723-84e4-59f2d38319f1-tuning-conf-dir\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836958 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-systemd-units\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836972 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-var-lib-cni-multus\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.836987 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-etc-kubernetes\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837001 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/58235808-6fc6-4723-84e4-59f2d38319f1-system-cni-dir\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837017 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-systemd\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837031 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-env-overrides\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837055 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5bgk\" (UniqueName: \"kubernetes.io/projected/58235808-6fc6-4723-84e4-59f2d38319f1-kube-api-access-n5bgk\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837071 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-var-lib-kubelet\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837084 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-system-cni-dir\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837097 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-run-k8s-cni-cncf-io\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837113 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-hostroot\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837126 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-multus-daemon-config\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837159 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4cc986fa-6620-43ff-ae05-11c71e326035-proxy-tls\") pod \"machine-config-daemon-dfqzg\" (UID: \"4cc986fa-6620-43ff-ae05-11c71e326035\") " pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837175 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-os-release\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837187 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-var-lib-cni-bin\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837201 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mxdv\" (UniqueName: \"kubernetes.io/projected/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-kube-api-access-4mxdv\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837216 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-kubelet\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837230 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-log-socket\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837248 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/4cc986fa-6620-43ff-ae05-11c71e326035-rootfs\") pod \"machine-config-daemon-dfqzg\" (UID: \"4cc986fa-6620-43ff-ae05-11c71e326035\") " pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837304 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-cni-netd\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837323 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-ovnkube-script-lib\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837343 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/58235808-6fc6-4723-84e4-59f2d38319f1-cnibin\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837361 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/58235808-6fc6-4723-84e4-59f2d38319f1-os-release\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837377 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-run-multus-certs\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837394 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-multus-cni-dir\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837392 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-cni-binary-copy\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837409 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-multus-socket-dir-parent\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837426 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-multus-conf-dir\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837442 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-run-ovn-kubernetes\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837446 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-slash\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837459 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-cni-bin\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837475 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4cc986fa-6620-43ff-ae05-11c71e326035-mcd-auth-proxy-config\") pod \"machine-config-daemon-dfqzg\" (UID: \"4cc986fa-6620-43ff-ae05-11c71e326035\") " pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837545 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/58235808-6fc6-4723-84e4-59f2d38319f1-cni-binary-copy\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837589 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-node-log\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837622 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-run-netns\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837633 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/2f9a55e9-2c59-4873-a10c-74f3f529aa72-hosts-file\") pod \"node-resolver-k5ts8\" (UID: \"2f9a55e9-2c59-4873-a10c-74f3f529aa72\") " pod="openshift-dns/node-resolver-k5ts8"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837669 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-var-lib-openvswitch\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837702 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-openvswitch\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.837959 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4cc986fa-6620-43ff-ae05-11c71e326035-mcd-auth-proxy-config\") pod \"machine-config-daemon-dfqzg\" (UID: \"4cc986fa-6620-43ff-ae05-11c71e326035\") " pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838114 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/58235808-6fc6-4723-84e4-59f2d38319f1-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838175 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-ovn\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838207 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-run-netns\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838233 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-etc-openvswitch\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838262 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838298 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-var-lib-cni-bin\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838324 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-systemd-units\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838351 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-var-lib-cni-multus\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838381 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-etc-kubernetes\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838410 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/58235808-6fc6-4723-84e4-59f2d38319f1-system-cni-dir\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838439 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-systemd\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838551 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/58235808-6fc6-4723-84e4-59f2d38319f1-cnibin\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838733 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-kubelet\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838771 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-log-socket\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838806 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/4cc986fa-6620-43ff-ae05-11c71e326035-rootfs\") pod \"machine-config-daemon-dfqzg\" (UID: \"4cc986fa-6620-43ff-ae05-11c71e326035\") " pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838821 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-env-overrides\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.838836 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-cni-netd\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.839019 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-var-lib-kubelet\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.839074 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-system-cni-dir\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.839109 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-run-k8s-cni-cncf-io\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.839159 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-hostroot\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.839423 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-ovnkube-script-lib\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.839641 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-multus-daemon-config\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.839707 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-os-release\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.839829 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-multus-socket-dir-parent\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.839890 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/58235808-6fc6-4723-84e4-59f2d38319f1-os-release\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.839925 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-host-run-multus-certs\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.839972 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-multus-cni-dir\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.839975 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-run-ovn-kubernetes\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.840008 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-cni-bin\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.840020 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-multus-conf-dir\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.843710 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4cc986fa-6620-43ff-ae05-11c71e326035-proxy-tls\") pod \"machine-config-daemon-dfqzg\" (UID: \"4cc986fa-6620-43ff-ae05-11c71e326035\") " pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.847942 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/58235808-6fc6-4723-84e4-59f2d38319f1-tuning-conf-dir\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.852178 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.856718 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b22efd65-426d-4220-9e18-5a84827be8ac-ovn-node-metrics-cert\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.862415 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mxdv\" (UniqueName: \"kubernetes.io/projected/2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89-kube-api-access-4mxdv\") pod \"multus-gbbtb\" (UID: \"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\") " pod="openshift-multus/multus-gbbtb"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.862512 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5bgk\" (UniqueName: \"kubernetes.io/projected/58235808-6fc6-4723-84e4-59f2d38319f1-kube-api-access-n5bgk\") pod \"multus-additional-cni-plugins-n7rcv\" (UID: \"58235808-6fc6-4723-84e4-59f2d38319f1\") " pod="openshift-multus/multus-additional-cni-plugins-n7rcv"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.870095 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95m58\" (UniqueName: \"kubernetes.io/projected/b22efd65-426d-4220-9e18-5a84827be8ac-kube-api-access-95m58\") pod \"ovnkube-node-47pt5\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.875044 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.876254 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8ggf\" (UniqueName: \"kubernetes.io/projected/4cc986fa-6620-43ff-ae05-11c71e326035-kube-api-access-h8ggf\") pod \"machine-config-daemon-dfqzg\" (UID: \"4cc986fa-6620-43ff-ae05-11c71e326035\") " pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.878343 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9wcw\" (UniqueName: \"kubernetes.io/projected/2f9a55e9-2c59-4873-a10c-74f3f529aa72-kube-api-access-k9wcw\") pod \"node-resolver-k5ts8\" (UID: \"2f9a55e9-2c59-4873-a10c-74f3f529aa72\") " pod="openshift-dns/node-resolver-k5ts8"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.886868 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.896368 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.906301 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.928477 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-k5ts8" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.933316 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-gbbtb" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.939213 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 16:51:33 crc kubenswrapper[4592]: W0929 16:51:33.946563 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f9a55e9_2c59_4873_a10c_74f3f529aa72.slice/crio-a96dd6697b9e3ad89607ad796fac6da2ca4484f19e6ab7d8d8da2851097eb88b WatchSource:0}: Error finding container a96dd6697b9e3ad89607ad796fac6da2ca4484f19e6ab7d8d8da2851097eb88b: Status 404 returned error can't find the container with id a96dd6697b9e3ad89607ad796fac6da2ca4484f19e6ab7d8d8da2851097eb88b Sep 29 16:51:33 crc kubenswrapper[4592]: W0929 16:51:33.954436 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c56ff1e_8dc0_43d1_bdf7_9eb71ffc5c89.slice/crio-36911d7e5f4caa61a2810bee9613e02afc9deee1119c77f1084e734e1c3b6e24 WatchSource:0}: Error finding container 36911d7e5f4caa61a2810bee9613e02afc9deee1119c77f1084e734e1c3b6e24: Status 404 returned error can't find the container with id 36911d7e5f4caa61a2810bee9613e02afc9deee1119c77f1084e734e1c3b6e24 Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.956241 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" Sep 29 16:51:33 crc kubenswrapper[4592]: I0929 16:51:33.964090 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:34 crc kubenswrapper[4592]: W0929 16:51:34.014463 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58235808_6fc6_4723_84e4_59f2d38319f1.slice/crio-e95bfd8c26def494c1518700cc2144f9e67ca79b95e959aea86cbf92f0cdf111 WatchSource:0}: Error finding container e95bfd8c26def494c1518700cc2144f9e67ca79b95e959aea86cbf92f0cdf111: Status 404 returned error can't find the container with id e95bfd8c26def494c1518700cc2144f9e67ca79b95e959aea86cbf92f0cdf111 Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.182136 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:34 crc kubenswrapper[4592]: E0929 16:51:34.182265 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.182340 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:34 crc kubenswrapper[4592]: E0929 16:51:34.182416 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.182594 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:34 crc kubenswrapper[4592]: E0929 16:51:34.182662 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.316988 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-k5ts8" event={"ID":"2f9a55e9-2c59-4873-a10c-74f3f529aa72","Type":"ContainerStarted","Data":"a96dd6697b9e3ad89607ad796fac6da2ca4484f19e6ab7d8d8da2851097eb88b"} Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.317862 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" event={"ID":"58235808-6fc6-4723-84e4-59f2d38319f1","Type":"ContainerStarted","Data":"e95bfd8c26def494c1518700cc2144f9e67ca79b95e959aea86cbf92f0cdf111"} Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.318715 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"f9144a2d1e0391fa7832e15945750b94057f1c97b6bcb71db2dc79b159c762fd"} Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.319751 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010"} Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.320802 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b"} Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.321476 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbbtb" event={"ID":"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89","Type":"ContainerStarted","Data":"36911d7e5f4caa61a2810bee9613e02afc9deee1119c77f1084e734e1c3b6e24"} Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.334085 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.345644 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.353252 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.361204 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 
29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.375111 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\"
:{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.386613 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.401099 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.419286 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.433368 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.454387 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.471342 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.488304 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:34 crc kubenswrapper[4592]: I0929 16:51:34.498480 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.324860 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbbtb" event={"ID":"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89","Type":"ContainerStarted","Data":"8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe"} Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.326753 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-k5ts8" event={"ID":"2f9a55e9-2c59-4873-a10c-74f3f529aa72","Type":"ContainerStarted","Data":"1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0"} Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.328266 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22" exitCode=0 Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.328330 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22"} Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.328356 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerStarted","Data":"53ac2a49c8b45aa9d478914f5bdb6d9587677e64fe398059924ee465ea3e7972"} Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.330423 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0"} Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.332448 4592 generic.go:334] "Generic (PLEG): container finished" podID="58235808-6fc6-4723-84e4-59f2d38319f1" containerID="395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b" exitCode=0 Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.332489 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" event={"ID":"58235808-6fc6-4723-84e4-59f2d38319f1","Type":"ContainerDied","Data":"395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b"} Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.338903 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" 
event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1"} Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.338947 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230"} Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.343507 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c"} Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.344792 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.360274 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.378189 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.389872 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready 
status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.402362 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.419039 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.434848 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.452364 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.474192 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.501399 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.514949 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.551079 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.572003 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.591111 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\
\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.614833 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc 
kubenswrapper[4592]: I0929 16:51:35.628378 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.638034 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.649506 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.661954 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.678810 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z 
is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.690890 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.704443 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.725289 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.752088 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.760050 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.760179 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.760258 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:51:39.760229176 +0000 UTC m=+29.908006857 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.760267 4592 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.760337 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:39.760329809 +0000 UTC m=+29.908107490 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.760605 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.760747 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.760782 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.760798 4592 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.760749 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.760847 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:39.760830345 +0000 UTC m=+29.908608026 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.760902 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.761047 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.761083 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.761095 4592 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.761166 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:39.761136543 +0000 UTC m=+29.908914224 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.761301 4592 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 16:51:35 crc kubenswrapper[4592]: E0929 16:51:35.761456 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:39.761427152 +0000 UTC m=+29.909204983 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.769265 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:35 crc kubenswrapper[4592]: I0929 16:51:35.780732 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.182638 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:36 crc kubenswrapper[4592]: E0929 16:51:36.182762 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.183062 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:36 crc kubenswrapper[4592]: E0929 16:51:36.183111 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.183165 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:36 crc kubenswrapper[4592]: E0929 16:51:36.183215 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.350184 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerStarted","Data":"865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6"} Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.350488 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerStarted","Data":"0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78"} Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.350510 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerStarted","Data":"833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b"} Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.350519 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerStarted","Data":"8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5"} Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.350528 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerStarted","Data":"ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6"} Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.353407 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" event={"ID":"58235808-6fc6-4723-84e4-59f2d38319f1","Type":"ContainerStarted","Data":"19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429"} Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.367105 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.387041 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.400071 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-b9sgl"] Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.400452 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-b9sgl" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.402559 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.402639 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.403678 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.403728 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.408196 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z 
is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.420311 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.431451 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.447924 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.458115 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.467655 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f366b299-488d-4b75-8df9-591e502330c8-serviceca\") pod \"node-ca-b9sgl\" (UID: \"f366b299-488d-4b75-8df9-591e502330c8\") " pod="openshift-image-registry/node-ca-b9sgl" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.467719 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fprqh\" (UniqueName: \"kubernetes.io/projected/f366b299-488d-4b75-8df9-591e502330c8-kube-api-access-fprqh\") pod \"node-ca-b9sgl\" (UID: \"f366b299-488d-4b75-8df9-591e502330c8\") " pod="openshift-image-registry/node-ca-b9sgl" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.467774 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f366b299-488d-4b75-8df9-591e502330c8-host\") pod \"node-ca-b9sgl\" (UID: \"f366b299-488d-4b75-8df9-591e502330c8\") " pod="openshift-image-registry/node-ca-b9sgl" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.470366 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.481197 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.495722 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.507911 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\
"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.524350 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.535266 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.546065 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.556269 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.565293 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.569112 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f366b299-488d-4b75-8df9-591e502330c8-serviceca\") pod \"node-ca-b9sgl\" (UID: \"f366b299-488d-4b75-8df9-591e502330c8\") " pod="openshift-image-registry/node-ca-b9sgl" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.569169 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fprqh\" (UniqueName: 
\"kubernetes.io/projected/f366b299-488d-4b75-8df9-591e502330c8-kube-api-access-fprqh\") pod \"node-ca-b9sgl\" (UID: \"f366b299-488d-4b75-8df9-591e502330c8\") " pod="openshift-image-registry/node-ca-b9sgl" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.569193 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f366b299-488d-4b75-8df9-591e502330c8-host\") pod \"node-ca-b9sgl\" (UID: \"f366b299-488d-4b75-8df9-591e502330c8\") " pod="openshift-image-registry/node-ca-b9sgl" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.569250 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f366b299-488d-4b75-8df9-591e502330c8-host\") pod \"node-ca-b9sgl\" (UID: \"f366b299-488d-4b75-8df9-591e502330c8\") " pod="openshift-image-registry/node-ca-b9sgl" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.570048 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f366b299-488d-4b75-8df9-591e502330c8-serviceca\") pod \"node-ca-b9sgl\" (UID: \"f366b299-488d-4b75-8df9-591e502330c8\") " pod="openshift-image-registry/node-ca-b9sgl" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.577777 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 
2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.588598 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fprqh\" (UniqueName: \"kubernetes.io/projected/f366b299-488d-4b75-8df9-591e502330c8-kube-api-access-fprqh\") pod \"node-ca-b9sgl\" (UID: \"f366b299-488d-4b75-8df9-591e502330c8\") " pod="openshift-image-registry/node-ca-b9sgl" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.589504 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.601939 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.616674 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.629847 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.630639 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-b9sgl" Sep 29 16:51:36 crc kubenswrapper[4592]: W0929 16:51:36.642978 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf366b299_488d_4b75_8df9_591e502330c8.slice/crio-d599887b14a62a572b3ad831d41d5d07556fb30dc98e825bb991f20d41eaa82d WatchSource:0}: Error finding container d599887b14a62a572b3ad831d41d5d07556fb30dc98e825bb991f20d41eaa82d: Status 404 returned error can't find the container with id d599887b14a62a572b3ad831d41d5d07556fb30dc98e825bb991f20d41eaa82d Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.651532 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.666266 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77
3257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev
/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\
\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.676979 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.687302 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.697266 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:36 crc kubenswrapper[4592]: I0929 16:51:36.707835 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.361307 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerStarted","Data":"e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d"} Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.363003 4592 generic.go:334] "Generic (PLEG): container finished" podID="58235808-6fc6-4723-84e4-59f2d38319f1" containerID="19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429" exitCode=0 Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.363071 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" event={"ID":"58235808-6fc6-4723-84e4-59f2d38319f1","Type":"ContainerDied","Data":"19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429"} Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.367522 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-b9sgl" event={"ID":"f366b299-488d-4b75-8df9-591e502330c8","Type":"ContainerStarted","Data":"fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9"} Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.367693 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-b9sgl" event={"ID":"f366b299-488d-4b75-8df9-591e502330c8","Type":"ContainerStarted","Data":"d599887b14a62a572b3ad831d41d5d07556fb30dc98e825bb991f20d41eaa82d"} Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.385673 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.402342 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.415738 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.431664 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.444755 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.458821 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.471694 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.489595 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.500727 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.511915 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.522993 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.535582 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.547327 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.559487 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.572446 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.583235 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.590632 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.598837 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.607018 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.619313 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.633193 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.644191 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.659302 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-
29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.676306 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z 
is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.689886 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.701684 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.713188 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:37 crc kubenswrapper[4592]: I0929 16:51:37.724190 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:37Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.116970 4592 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.118660 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.118701 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.118712 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.118815 4592 kubelet_node_status.go:76] "Attempting to register node" node="crc" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.123814 4592 kubelet_node_status.go:115] "Node was previously registered" node="crc" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.124100 4592 kubelet_node_status.go:79] "Successfully registered node" node="crc" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.124976 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.125002 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.125010 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.125022 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.125031 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:38 crc kubenswrapper[4592]: E0929 16:51:38.144543 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.148732 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.148790 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.148805 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.148821 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.148834 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:38 crc kubenswrapper[4592]: E0929 16:51:38.165754 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.168748 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.168778 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.168795 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.168809 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.168819 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:38 crc kubenswrapper[4592]: E0929 16:51:38.180905 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.181966 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.182007 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:38 crc kubenswrapper[4592]: E0929 16:51:38.182084 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.181967 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:38 crc kubenswrapper[4592]: E0929 16:51:38.182217 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:51:38 crc kubenswrapper[4592]: E0929 16:51:38.182393 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.186595 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.186749 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.186906 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.186989 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.187083 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:38 crc kubenswrapper[4592]: E0929 16:51:38.197634 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.200751 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.200882 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.200973 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.201065 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.201182 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:38 crc kubenswrapper[4592]: E0929 16:51:38.215478 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: E0929 16:51:38.215933 4592 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.218129 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.218255 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.218320 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.218390 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.218467 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.320483 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.320683 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.320770 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.320827 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.320878 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.373626 4592 generic.go:334] "Generic (PLEG): container finished" podID="58235808-6fc6-4723-84e4-59f2d38319f1" containerID="613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40" exitCode=0 Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.373733 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" event={"ID":"58235808-6fc6-4723-84e4-59f2d38319f1","Type":"ContainerDied","Data":"613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40"} Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.392786 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.411847 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.423356 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.423394 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.423404 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.423425 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.423436 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.428340 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.445536 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.465480 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\
":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary
-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.477510 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers 
with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner 
reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.492585 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.505970 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.519792 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.527802 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.527843 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.527857 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.527871 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.527880 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.551079 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd
1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.565657 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.587971 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.599203 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.629547 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.629586 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.629599 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.629616 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.629628 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.632747 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:38Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.731779 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.731829 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.731845 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.731861 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.731872 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.833837 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.833884 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.833896 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.833913 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.833925 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.937216 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.937269 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.937286 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.937315 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:38 crc kubenswrapper[4592]: I0929 16:51:38.937333 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:38Z","lastTransitionTime":"2025-09-29T16:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.040678 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.040769 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.040791 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.040819 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.040842 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:39Z","lastTransitionTime":"2025-09-29T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.143815 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.143876 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.143894 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.143920 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.143937 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:39Z","lastTransitionTime":"2025-09-29T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.246118 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.246477 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.246490 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.246510 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.246526 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:39Z","lastTransitionTime":"2025-09-29T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.349876 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.349930 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.349946 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.349971 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.349990 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:39Z","lastTransitionTime":"2025-09-29T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.381907 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerStarted","Data":"55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d"} Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.384905 4592 generic.go:334] "Generic (PLEG): container finished" podID="58235808-6fc6-4723-84e4-59f2d38319f1" containerID="d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388" exitCode=0 Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.384947 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" event={"ID":"58235808-6fc6-4723-84e4-59f2d38319f1","Type":"ContainerDied","Data":"d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388"} Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.411381 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.433016 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.444303 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.452362 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.452399 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.452408 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.452425 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.452438 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:39Z","lastTransitionTime":"2025-09-29T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.456243 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.473690 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.485742 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.497312 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.508561 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.518927 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.528291 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.539615 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.554676 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.554711 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.554720 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.554733 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.554741 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:39Z","lastTransitionTime":"2025-09-29T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.554705 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.567793 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.584996 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.657100 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.657225 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.657242 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.657266 4592 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.657283 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:39Z","lastTransitionTime":"2025-09-29T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.760358 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.760435 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.760460 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.760489 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.760511 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:39Z","lastTransitionTime":"2025-09-29T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.801085 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.801208 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.801238 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.801275 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 
16:51:39.801298 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801339 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:51:47.801298751 +0000 UTC m=+37.949076452 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801424 4592 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801621 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:47.80160781 +0000 UTC m=+37.949385491 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801486 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801676 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801688 4592 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801727 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:47.801719263 +0000 UTC m=+37.949496944 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801492 4592 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801757 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:47.801751564 +0000 UTC m=+37.949529235 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801505 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801772 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801779 4592 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 16:51:39 crc kubenswrapper[4592]: E0929 16:51:39.801795 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:47.801790565 +0000 UTC m=+37.949568246 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.862354 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.862394 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.862406 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.862424 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.862437 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:39Z","lastTransitionTime":"2025-09-29T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.965023 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.965059 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.965067 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.965085 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:39 crc kubenswrapper[4592]: I0929 16:51:39.965097 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:39Z","lastTransitionTime":"2025-09-29T16:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.067054 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.067103 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.067120 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.067139 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.067169 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:40Z","lastTransitionTime":"2025-09-29T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.169221 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.169265 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.169275 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.169287 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.169296 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:40Z","lastTransitionTime":"2025-09-29T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.182029 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:51:40 crc kubenswrapper[4592]: E0929 16:51:40.182229 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.182506 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.182561 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:51:40 crc kubenswrapper[4592]: E0929 16:51:40.182738 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:51:40 crc kubenswrapper[4592]: E0929 16:51:40.182860 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.272186 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.272264 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.272288 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.272320 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.272347 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:40Z","lastTransitionTime":"2025-09-29T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.374650 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.374708 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.374727 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.374748 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.374763 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:40Z","lastTransitionTime":"2025-09-29T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.392513 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" event={"ID":"58235808-6fc6-4723-84e4-59f2d38319f1","Type":"ContainerStarted","Data":"e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689"}
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.409042 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.426968 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.449476 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.468732 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.476849 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.476884 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.476895 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.476912 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.476922 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:40Z","lastTransitionTime":"2025-09-29T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.481252 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.493087 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.507300 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.517821 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.529631 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.539221 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z"
Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.550891 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z"
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.567521 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.579319 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.579344 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:40 crc 
kubenswrapper[4592]: I0929 16:51:40.579353 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.579365 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.579372 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:40Z","lastTransitionTime":"2025-09-29T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.582558 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.596795 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:40Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.656539 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.657288 4592 scope.go:117] "RemoveContainer" containerID="5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9" Sep 29 16:51:40 crc kubenswrapper[4592]: E0929 16:51:40.657478 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.682074 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.682126 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.682135 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.682168 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.682179 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:40Z","lastTransitionTime":"2025-09-29T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.784403 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.784445 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.784453 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.784641 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.784652 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:40Z","lastTransitionTime":"2025-09-29T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.886699 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.886724 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.886732 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.886744 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.886752 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:40Z","lastTransitionTime":"2025-09-29T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.989425 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.989476 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.989493 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.989517 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:40 crc kubenswrapper[4592]: I0929 16:51:40.989534 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:40Z","lastTransitionTime":"2025-09-29T16:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.091796 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.091844 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.091855 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.091873 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.091887 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:41Z","lastTransitionTime":"2025-09-29T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.193445 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.193478 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.193487 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.193500 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.193510 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:41Z","lastTransitionTime":"2025-09-29T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.196591 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.210291 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.226775 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.240734 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.256951 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z 
is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.268034 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.277860 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.285417 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.294202 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.295737 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.295775 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.295786 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.295804 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.295814 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:41Z","lastTransitionTime":"2025-09-29T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.304133 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.317922 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.329216 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.340099 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.353313 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.397184 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.397227 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.397238 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.397277 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.397289 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:41Z","lastTransitionTime":"2025-09-29T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.500493 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.500555 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.500571 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.500596 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.500612 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:41Z","lastTransitionTime":"2025-09-29T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.608978 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.609009 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.609019 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.609035 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.609048 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:41Z","lastTransitionTime":"2025-09-29T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.710978 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.711044 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.711056 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.711071 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.711082 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:41Z","lastTransitionTime":"2025-09-29T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.813158 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.813199 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.813211 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.813227 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.813237 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:41Z","lastTransitionTime":"2025-09-29T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.915008 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.915053 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.915064 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.915082 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:41 crc kubenswrapper[4592]: I0929 16:51:41.915094 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:41Z","lastTransitionTime":"2025-09-29T16:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.019549 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.019612 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.019642 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.019684 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.019708 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:42Z","lastTransitionTime":"2025-09-29T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.122774 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.122845 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.122856 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.122874 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.122888 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:42Z","lastTransitionTime":"2025-09-29T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.182955 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:42 crc kubenswrapper[4592]: E0929 16:51:42.183535 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.183076 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:42 crc kubenswrapper[4592]: E0929 16:51:42.183634 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.183032 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:42 crc kubenswrapper[4592]: E0929 16:51:42.183685 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.226085 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.226129 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.226152 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.226171 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.226186 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:42Z","lastTransitionTime":"2025-09-29T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.328942 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.329035 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.329052 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.329077 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.329092 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:42Z","lastTransitionTime":"2025-09-29T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.401490 4592 generic.go:334] "Generic (PLEG): container finished" podID="58235808-6fc6-4723-84e4-59f2d38319f1" containerID="e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689" exitCode=0 Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.401534 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" event={"ID":"58235808-6fc6-4723-84e4-59f2d38319f1","Type":"ContainerDied","Data":"e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689"} Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.415177 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerStarted","Data":"39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda"} Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.415800 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.415854 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.440953 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.440985 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.440996 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.441011 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.441026 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:42Z","lastTransitionTime":"2025-09-29T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.454204 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.455987 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.458547 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.475028 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.488249 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.503503 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.523273 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.537005 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.543071 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.543101 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.543109 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.543122 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.543131 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:42Z","lastTransitionTime":"2025-09-29T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.555105 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.578405 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.592371 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.610153 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.622593 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.636413 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.645294 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.645325 4592 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.645333 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.645348 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.645358 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:42Z","lastTransitionTime":"2025-09-29T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.648841 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":tr
ue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.661309 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.675480 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.687934 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.703497 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.716691 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.739369 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380bef
e89cadb7f139603eaa6e8cda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.748220 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.748246 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.748254 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.748266 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.748275 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:42Z","lastTransitionTime":"2025-09-29T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.752922 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.764613 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.775887 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.788495 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.798210 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.810265 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.821927 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.836318 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.850909 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.851235 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.851375 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.851499 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.851612 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:42Z","lastTransitionTime":"2025-09-29T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.855845 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5b
gk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:42Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.961508 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.961553 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.961566 4592 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.961585 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:42 crc kubenswrapper[4592]: I0929 16:51:42.961598 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:42Z","lastTransitionTime":"2025-09-29T16:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.064409 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.064587 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.064680 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.064761 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.064826 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:43Z","lastTransitionTime":"2025-09-29T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.167254 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.167282 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.167289 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.167301 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.167310 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:43Z","lastTransitionTime":"2025-09-29T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.271582 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.271621 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.271631 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.271651 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.271669 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:43Z","lastTransitionTime":"2025-09-29T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.374538 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.374828 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.374844 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.374861 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.374871 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:43Z","lastTransitionTime":"2025-09-29T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.427332 4592 generic.go:334] "Generic (PLEG): container finished" podID="58235808-6fc6-4723-84e4-59f2d38319f1" containerID="636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81" exitCode=0 Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.427395 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" event={"ID":"58235808-6fc6-4723-84e4-59f2d38319f1","Type":"ContainerDied","Data":"636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81"} Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.427515 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.443659 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.455631 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.469207 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.478102 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.478159 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.478172 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.478187 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.478198 4592 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:43Z","lastTransitionTime":"2025-09-29T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.482239 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.492823 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.505260 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.517212 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.529041 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.541337 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.559243 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380bef
e89cadb7f139603eaa6e8cda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.568984 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.578545 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.580739 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.580760 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.580768 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.580781 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.580789 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:43Z","lastTransitionTime":"2025-09-29T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.590419 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.602278 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:43Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.683365 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.683398 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.683407 4592 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.683421 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.683429 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:43Z","lastTransitionTime":"2025-09-29T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.787704 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.787756 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.787768 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.787786 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.787800 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:43Z","lastTransitionTime":"2025-09-29T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.890501 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.890558 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.890569 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.890584 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.890596 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:43Z","lastTransitionTime":"2025-09-29T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.992783 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.992855 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.992896 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.992927 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:43 crc kubenswrapper[4592]: I0929 16:51:43.992950 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:43Z","lastTransitionTime":"2025-09-29T16:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.095383 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.095424 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.095436 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.095454 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.095465 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:44Z","lastTransitionTime":"2025-09-29T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.183098 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.183098 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.183119 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:44 crc kubenswrapper[4592]: E0929 16:51:44.183472 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:51:44 crc kubenswrapper[4592]: E0929 16:51:44.183543 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:51:44 crc kubenswrapper[4592]: E0929 16:51:44.183307 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.197565 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.197611 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.197623 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.197638 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.197649 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:44Z","lastTransitionTime":"2025-09-29T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.300220 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.300289 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.300298 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.300328 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.300338 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:44Z","lastTransitionTime":"2025-09-29T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.402935 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.402983 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.402998 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.403021 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.403033 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:44Z","lastTransitionTime":"2025-09-29T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.434233 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" event={"ID":"58235808-6fc6-4723-84e4-59f2d38319f1","Type":"ContainerStarted","Data":"f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf"} Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.434316 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.451723 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380bef
e89cadb7f139603eaa6e8cda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.462790 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.474262 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.484624 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.494957 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.504628 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.504700 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.504712 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.504727 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.504738 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:44Z","lastTransitionTime":"2025-09-29T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.507936 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.517810 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.527125 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.538187 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.546540 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.560337 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.571509 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.583291 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.596679 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:44Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.607118 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.607164 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:44 crc 
kubenswrapper[4592]: I0929 16:51:44.607173 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.607189 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.607198 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:44Z","lastTransitionTime":"2025-09-29T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.709029 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.709067 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.709078 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.709093 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.709125 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:44Z","lastTransitionTime":"2025-09-29T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.811398 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.811442 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.811454 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.811478 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.811491 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:44Z","lastTransitionTime":"2025-09-29T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.916389 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.916771 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.916787 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.916811 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.916829 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:44Z","lastTransitionTime":"2025-09-29T16:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:44 crc kubenswrapper[4592]: I0929 16:51:44.984186 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.018811 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.018849 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.018860 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.018874 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.018888 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:45Z","lastTransitionTime":"2025-09-29T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.121381 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.121434 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.121445 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.121460 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.121469 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:45Z","lastTransitionTime":"2025-09-29T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.223457 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.223515 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.223534 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.223554 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.223569 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:45Z","lastTransitionTime":"2025-09-29T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.325865 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.325919 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.325928 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.325941 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.325949 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:45Z","lastTransitionTime":"2025-09-29T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.428033 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.428076 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.428110 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.428128 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.428183 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:45Z","lastTransitionTime":"2025-09-29T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.529983 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.530028 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.530039 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.530060 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.530072 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:45Z","lastTransitionTime":"2025-09-29T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.632725 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.632994 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.633091 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.633212 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.633303 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:45Z","lastTransitionTime":"2025-09-29T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.736810 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.737093 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.737208 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.737293 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.737367 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:45Z","lastTransitionTime":"2025-09-29T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.841047 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.841250 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.841283 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.841302 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.841320 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:45Z","lastTransitionTime":"2025-09-29T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.943026 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.943082 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.943097 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.943116 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.943130 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:45Z","lastTransitionTime":"2025-09-29T16:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.980348 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz"]
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.980774 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.982529 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.982706 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.991176 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64794b03-cbe6-4a8e-8502-f2291c53b986-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-mrgtz\" (UID: \"64794b03-cbe6-4a8e-8502-f2291c53b986\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.991240 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmwwn\" (UniqueName: \"kubernetes.io/projected/64794b03-cbe6-4a8e-8502-f2291c53b986-kube-api-access-wmwwn\") pod \"ovnkube-control-plane-749d76644c-mrgtz\" (UID: \"64794b03-cbe6-4a8e-8502-f2291c53b986\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.991273 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/64794b03-cbe6-4a8e-8502-f2291c53b986-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-mrgtz\" (UID: \"64794b03-cbe6-4a8e-8502-f2291c53b986\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz"
Sep 29 16:51:45 crc kubenswrapper[4592]: I0929 16:51:45.991308 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64794b03-cbe6-4a8e-8502-f2291c53b986-env-overrides\") pod \"ovnkube-control-plane-749d76644c-mrgtz\" (UID: \"64794b03-cbe6-4a8e-8502-f2291c53b986\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz"
Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.001717 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:45Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.014844 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.029287 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.040092 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.045063 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.045106 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.045115 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.045128 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.045139 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:46Z","lastTransitionTime":"2025-09-29T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.055520 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.080567 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.091727 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmwwn\" (UniqueName: \"kubernetes.io/projected/64794b03-cbe6-4a8e-8502-f2291c53b986-kube-api-access-wmwwn\") pod \"ovnkube-control-plane-749d76644c-mrgtz\" (UID: \"64794b03-cbe6-4a8e-8502-f2291c53b986\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.091770 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/64794b03-cbe6-4a8e-8502-f2291c53b986-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-mrgtz\" (UID: \"64794b03-cbe6-4a8e-8502-f2291c53b986\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.091802 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64794b03-cbe6-4a8e-8502-f2291c53b986-env-overrides\") pod \"ovnkube-control-plane-749d76644c-mrgtz\" (UID: \"64794b03-cbe6-4a8e-8502-f2291c53b986\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.091840 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64794b03-cbe6-4a8e-8502-f2291c53b986-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-mrgtz\" (UID: \"64794b03-cbe6-4a8e-8502-f2291c53b986\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.092467 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64794b03-cbe6-4a8e-8502-f2291c53b986-env-overrides\") pod \"ovnkube-control-plane-749d76644c-mrgtz\" (UID: \"64794b03-cbe6-4a8e-8502-f2291c53b986\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.092572 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/64794b03-cbe6-4a8e-8502-f2291c53b986-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-mrgtz\" (UID: \"64794b03-cbe6-4a8e-8502-f2291c53b986\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.095372 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.099785 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64794b03-cbe6-4a8e-8502-f2291c53b986-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-mrgtz\" (UID: \"64794b03-cbe6-4a8e-8502-f2291c53b986\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.111267 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.114999 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmwwn\" (UniqueName: \"kubernetes.io/projected/64794b03-cbe6-4a8e-8502-f2291c53b986-kube-api-access-wmwwn\") pod \"ovnkube-control-plane-749d76644c-mrgtz\" (UID: \"64794b03-cbe6-4a8e-8502-f2291c53b986\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.138655 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.147988 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.148023 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.148033 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.148048 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.148059 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:46Z","lastTransitionTime":"2025-09-29T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.150695 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.160566 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.171829 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.181008 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.182285 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:46 crc kubenswrapper[4592]: E0929 16:51:46.182367 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.182415 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:46 crc kubenswrapper[4592]: E0929 16:51:46.182451 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.182498 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:46 crc kubenswrapper[4592]: E0929 16:51:46.182538 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.191755 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 
29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.217884 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:46Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.251023 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.251076 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.251092 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.251116 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.251136 4592 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:46Z","lastTransitionTime":"2025-09-29T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.295512 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" Sep 29 16:51:46 crc kubenswrapper[4592]: W0929 16:51:46.321709 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64794b03_cbe6_4a8e_8502_f2291c53b986.slice/crio-50660a377db20243e59e26a32c8a4ee2d1d9259f5429094ab436f56511a373ed WatchSource:0}: Error finding container 50660a377db20243e59e26a32c8a4ee2d1d9259f5429094ab436f56511a373ed: Status 404 returned error can't find the container with id 50660a377db20243e59e26a32c8a4ee2d1d9259f5429094ab436f56511a373ed Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.354110 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.354135 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.354159 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.354174 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.354183 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:46Z","lastTransitionTime":"2025-09-29T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.441137 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" event={"ID":"64794b03-cbe6-4a8e-8502-f2291c53b986","Type":"ContainerStarted","Data":"50660a377db20243e59e26a32c8a4ee2d1d9259f5429094ab436f56511a373ed"} Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.457751 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.457809 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.457843 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.457864 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.457882 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:46Z","lastTransitionTime":"2025-09-29T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.560767 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.560812 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.560822 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.560841 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.560852 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:46Z","lastTransitionTime":"2025-09-29T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.663424 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.663458 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.663468 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.663481 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.663492 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:46Z","lastTransitionTime":"2025-09-29T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.768954 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.769309 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.769390 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.769413 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.769427 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:46Z","lastTransitionTime":"2025-09-29T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.871722 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.871771 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.871781 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.871796 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.871804 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:46Z","lastTransitionTime":"2025-09-29T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.977585 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.977628 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.977639 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.977653 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:46 crc kubenswrapper[4592]: I0929 16:51:46.977665 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:46Z","lastTransitionTime":"2025-09-29T16:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.080593 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.080649 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.080670 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.080694 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.080711 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:47Z","lastTransitionTime":"2025-09-29T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.114663 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-qvsjc"] Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.115142 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.115217 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.128271 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.138982 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.151447 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.163065 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.177958 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.183813 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.183848 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.183858 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.183871 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.183881 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:47Z","lastTransitionTime":"2025-09-29T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.192218 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.200043 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.200070 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tsp9\" (UniqueName: \"kubernetes.io/projected/484e63f2-7bae-4e57-ab79-95cba3bad285-kube-api-access-4tsp9\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.204809 4592 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad6
1d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.216790 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.227494 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.243642 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380bef
e89cadb7f139603eaa6e8cda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.253471 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.263486 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.271851 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.281103 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.285679 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.285704 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.285715 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.285728 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.285736 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:47Z","lastTransitionTime":"2025-09-29T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.292531 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.301026 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.301068 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tsp9\" (UniqueName: \"kubernetes.io/projected/484e63f2-7bae-4e57-ab79-95cba3bad285-kube-api-access-4tsp9\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.301138 4592 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.301229 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs podName:484e63f2-7bae-4e57-ab79-95cba3bad285 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:47.801211123 +0000 UTC m=+37.948988804 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs") pod "network-metrics-daemon-qvsjc" (UID: "484e63f2-7bae-4e57-ab79-95cba3bad285") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.301845 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:47Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.321099 4592 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4tsp9\" (UniqueName: \"kubernetes.io/projected/484e63f2-7bae-4e57-ab79-95cba3bad285-kube-api-access-4tsp9\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.387834 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.387876 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.387886 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.387914 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.387925 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:47Z","lastTransitionTime":"2025-09-29T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.489857 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.489896 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.489904 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.489922 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.489931 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:47Z","lastTransitionTime":"2025-09-29T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.592010 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.592041 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.592050 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.592064 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.592073 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:47Z","lastTransitionTime":"2025-09-29T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.694255 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.694312 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.694330 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.694360 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.694379 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:47Z","lastTransitionTime":"2025-09-29T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.796373 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.796424 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.796446 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.796462 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.796476 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:47Z","lastTransitionTime":"2025-09-29T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.804505 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.804590 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.804623 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.804673 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:52:03.804656818 +0000 UTC m=+53.952434489 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.804700 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.804729 4592 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.804728 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.804756 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
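[editor: the repeated "failed calling webhook" records above share one root cause: the serving certificate behind https://127.0.0.1:9743 is past its NotAfter date (2025-08-24T17:21:41Z, checked at 2025-09-29T16:51:47Z). A minimal Go sketch of the validity-window test that yields this exact "x509: certificate has expired or is not yet valid" error class; the PEM path is a hypothetical placeholder, not where this cluster stores the webhook cert:]

    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "log"
        "os"
        "time"
    )

    func main() {
        // Hypothetical path; on a real node the cert would be extracted from a secret.
        data, err := os.ReadFile("/tmp/webhook-serving.crt")
        if err != nil {
            log.Fatal(err)
        }
        block, _ := pem.Decode(data)
        if block == nil {
            log.Fatal("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            log.Fatal(err)
        }
        now := time.Now().UTC()
        // The window check behind "certificate has expired or is not yet valid".
        if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
            fmt.Printf("INVALID: current time %s is not within [%s, %s]\n",
                now.Format(time.RFC3339),
                cert.NotBefore.Format(time.RFC3339),
                cert.NotAfter.Format(time.RFC3339))
            return
        }
        fmt.Printf("OK: valid until %s\n", cert.NotAfter.Format(time.RFC3339))
    }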
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.804756 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:52:03.804749921 +0000 UTC m=+53.952527602 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.804755 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.804841 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.804857 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.804870 4592 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.804922 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 16:52:03.804911276 +0000 UTC m=+53.952688967 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.805009 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.805022 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.805031 4592 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.805058 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 16:52:03.80504939 +0000 UTC m=+53.952827081 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.805115 4592 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.805127 4592 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.805167 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:52:03.805133762 +0000 UTC m=+53.952911453 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Sep 29 16:51:47 crc kubenswrapper[4592]: E0929 16:51:47.805183 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs podName:484e63f2-7bae-4e57-ab79-95cba3bad285 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:48.805175713 +0000 UTC m=+38.952953404 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs") pod "network-metrics-daemon-qvsjc" (UID: "484e63f2-7bae-4e57-ab79-95cba3bad285") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.898250 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.898287 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.898295 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.898309 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:47 crc kubenswrapper[4592]: I0929 16:51:47.898319 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:47Z","lastTransitionTime":"2025-09-29T16:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.000864 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.001108 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.001188 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.001206 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
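[editor: the durationBeforeRetry values in the mount errors above (500ms earlier, 1s here, 16s at the longest in this run) follow the kubelet's per-operation exponential backoff: each consecutive failure roughly doubles the wait before the next attempt, up to a cap. A small Go sketch of that doubling-with-cap policy; the 500ms initial delay matches the log, while the 2m2s cap is an assumption about this kubelet build's defaults:]

    package main

    import (
        "fmt"
        "time"
    )

    // nextBackoff doubles the previous delay and clamps it to maxDelay,
    // mirroring the durationBeforeRetry progression seen in the log.
    func nextBackoff(prev, initial, maxDelay time.Duration) time.Duration {
        if prev == 0 {
            return initial
        }
        next := prev * 2
        if next > maxDelay {
            return maxDelay
        }
        return next
    }

    func main() {
        const (
            initial  = 500 * time.Millisecond        // first retry interval seen above
            maxDelay = 2*time.Minute + 2*time.Second // assumed cap
        )
        d := time.Duration(0)
        for i := 0; i < 10; i++ {
            d = nextBackoff(d, initial, maxDelay)
            fmt.Printf("attempt %d: durationBeforeRetry %s\n", i+1, d)
        }
        // Prints 500ms, 1s, 2s, 4s, 8s, 16s, ... up to the cap.
    }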
Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.108040 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.108087 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.108102 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.108134 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.108178 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.182330 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:48 crc kubenswrapper[4592]: E0929 16:51:48.182468 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.182916 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:48 crc kubenswrapper[4592]: E0929 16:51:48.182998 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.183049 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:48 crc kubenswrapper[4592]: E0929 16:51:48.183124 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.210543 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.210615 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.210629 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.210653 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.210666 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.237048 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.237120 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.237131 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.237171 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.237193 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: E0929 16:51:48.253963 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:48Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.258135 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.258191 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.258200 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.258215 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.258226 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: E0929 16:51:48.271025 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:48Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.274338 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.274381 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.274390 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.274405 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.274415 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: E0929 16:51:48.284945 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:48Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.288951 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.288987 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.288998 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.289016 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.289029 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: E0929 16:51:48.302004 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:48Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.309761 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.309797 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.309809 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.309825 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.309836 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: E0929 16:51:48.323330 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:48Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:48 crc kubenswrapper[4592]: E0929 16:51:48.323467 4592 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.327307 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.327354 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.327368 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.327390 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.327405 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.429765 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.429822 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.429834 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.429854 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.429867 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.455653 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" event={"ID":"64794b03-cbe6-4a8e-8502-f2291c53b986","Type":"ContainerStarted","Data":"6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff"} Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.532359 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.532680 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.532774 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.532870 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.532950 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.636822 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.636917 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.636935 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.636952 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.636965 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.741486 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.741529 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.741547 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.741569 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.741585 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.816988 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:48 crc kubenswrapper[4592]: E0929 16:51:48.817138 4592 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 16:51:48 crc kubenswrapper[4592]: E0929 16:51:48.817222 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs podName:484e63f2-7bae-4e57-ab79-95cba3bad285 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:50.817202193 +0000 UTC m=+40.964979874 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs") pod "network-metrics-daemon-qvsjc" (UID: "484e63f2-7bae-4e57-ab79-95cba3bad285") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.843707 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.843734 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.843743 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.843756 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.843768 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.945739 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.945779 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.945787 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.945801 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:48 crc kubenswrapper[4592]: I0929 16:51:48.945812 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:48Z","lastTransitionTime":"2025-09-29T16:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.048334 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.048422 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.048445 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.048472 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.048493 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:49Z","lastTransitionTime":"2025-09-29T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.152196 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.152250 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.152263 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.152284 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.152297 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:49Z","lastTransitionTime":"2025-09-29T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.182717 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:49 crc kubenswrapper[4592]: E0929 16:51:49.182950 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.255611 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.255666 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.255677 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.255695 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.255707 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:49Z","lastTransitionTime":"2025-09-29T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.358175 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.358223 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.358235 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.358254 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.358269 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:49Z","lastTransitionTime":"2025-09-29T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.461043 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.461070 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.461081 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.461095 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.461104 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:49Z","lastTransitionTime":"2025-09-29T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.461673 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" event={"ID":"64794b03-cbe6-4a8e-8502-f2291c53b986","Type":"ContainerStarted","Data":"e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5"} Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.464041 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/0.log" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.466487 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda" exitCode=1 Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.466521 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda"} Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.467111 4592 scope.go:117] "RemoveContainer" containerID="39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.479231 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.492587 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.505766 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.519516 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.530897 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.547393 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.562803 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.562831 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.562838 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.562851 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.562859 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:49Z","lastTransitionTime":"2025-09-29T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.563971 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.582753 4592 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"k
ube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.593994 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.605001 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.616309 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.632102 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.646698 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.656971 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.665693 4592 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.665733 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.665742 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.665758 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.665769 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:49Z","lastTransitionTime":"2025-09-29T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.666291 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.676759 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.690033 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\
\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.702854 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\
"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID
\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\
\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.716126 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state
\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.727765 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.739760 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.751575 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.767524 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380bef
e89cadb7f139603eaa6e8cda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"6:51:48.830011 5790 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 16:51:48.830168 5790 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.830419 5790 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.830834 5790 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.831093 5790 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 16:51:48.831101 5790 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.831133 5790 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 16:51:48.831184 5790 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 16:51:48.831218 5790 factory.go:656] Stopping watch factory\\\\nI0929 16:51:48.831229 5790 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.767755 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.767789 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.767798 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.767817 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.767826 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:49Z","lastTransitionTime":"2025-09-29T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.781331 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.796433 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.813797 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.828923 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mo
untPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.843419 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.854272 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.869945 4592 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.869995 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.870007 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.870022 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.870032 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:49Z","lastTransitionTime":"2025-09-29T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.878939 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.898706 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.911329 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.971938 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.971973 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.971981 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.971995 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:49 crc kubenswrapper[4592]: I0929 16:51:49.972003 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:49Z","lastTransitionTime":"2025-09-29T16:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.074472 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.074519 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.074529 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.074543 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.074552 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:50Z","lastTransitionTime":"2025-09-29T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.177115 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.177178 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.177186 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.177198 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.177209 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:50Z","lastTransitionTime":"2025-09-29T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.182408 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.182415 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:50 crc kubenswrapper[4592]: E0929 16:51:50.182533 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:51:50 crc kubenswrapper[4592]: E0929 16:51:50.182604 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.182430 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:50 crc kubenswrapper[4592]: E0929 16:51:50.182676 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.279074 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.279114 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.279124 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.279160 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.279172 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:50Z","lastTransitionTime":"2025-09-29T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.382657 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.382700 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.382710 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.382724 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.382732 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:50Z","lastTransitionTime":"2025-09-29T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.473768 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/1.log" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.474741 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/0.log" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.478774 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39" exitCode=1 Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.478839 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39"} Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.478919 4592 scope.go:117] "RemoveContainer" containerID="39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.480114 4592 scope.go:117] "RemoveContainer" containerID="6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39" Sep 29 16:51:50 crc kubenswrapper[4592]: E0929 16:51:50.480470 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\"" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.486730 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.486771 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.486783 4592 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.486803 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.486815 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:50Z","lastTransitionTime":"2025-09-29T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.495408 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 
16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.511242 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.534903 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.552323 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.589247 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.589299 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:50 crc 
kubenswrapper[4592]: I0929 16:51:50.589313 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.589334 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.589349 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:50Z","lastTransitionTime":"2025-09-29T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.590599 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb1
4133d095cf57fe1cbbc60b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"6:51:48.830011 5790 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 16:51:48.830168 5790 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.830419 5790 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.830834 5790 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.831093 5790 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 16:51:48.831101 5790 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.831133 5790 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 16:51:48.831184 5790 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 16:51:48.831218 5790 factory.go:656] Stopping watch factory\\\\nI0929 16:51:48.831229 5790 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:50Z\\\",\\\"message\\\":\\\":0a:d9:00:04 10.217.0.4]} options:{GoMap:map[iface-id-ver:3b6479f0-333b-4a96-9adf-2099afdc2447 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61897e97-c771-4738-8709-09636387cb00}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:51:50.231985 6025 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/packageserver-service for network=default are: map[]\\\\nF0929 16:51:50.229762 6025 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to 
call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z]\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.633945 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.647254 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.662784 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.677187 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.691870 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.691906 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.691915 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.691929 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.691937 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:50Z","lastTransitionTime":"2025-09-29T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.693527 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.706103 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.715478 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.726564 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 
16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.738441 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.747627 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.757512 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.794679 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.794722 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.794734 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.794750 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.794762 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:50Z","lastTransitionTime":"2025-09-29T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.833537 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:50 crc kubenswrapper[4592]: E0929 16:51:50.833675 4592 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 16:51:50 crc kubenswrapper[4592]: E0929 16:51:50.833735 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs podName:484e63f2-7bae-4e57-ab79-95cba3bad285 nodeName:}" failed. No retries permitted until 2025-09-29 16:51:54.833719124 +0000 UTC m=+44.981496805 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs") pod "network-metrics-daemon-qvsjc" (UID: "484e63f2-7bae-4e57-ab79-95cba3bad285") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.897218 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.897247 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.897255 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.897268 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:50 crc kubenswrapper[4592]: I0929 16:51:50.897277 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:50Z","lastTransitionTime":"2025-09-29T16:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.000765 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.000798 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.000809 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.000824 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.000835 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:51Z","lastTransitionTime":"2025-09-29T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.105385 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.105463 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.105486 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.105514 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.105537 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:51Z","lastTransitionTime":"2025-09-29T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.182268 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:51 crc kubenswrapper[4592]: E0929 16:51:51.182392 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.196055 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.206552 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.207810 4592 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.207834 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.207843 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.207855 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.207864 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:51Z","lastTransitionTime":"2025-09-29T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.216203 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.227074 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.239578 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.252267 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.263766 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.278491 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.291527 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.302983 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.310483 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.310522 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.310531 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.310545 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.310555 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:51Z","lastTransitionTime":"2025-09-29T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.319453 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.331962 4592 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.348978 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.361165 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.383081 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.402991 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb1
4133d095cf57fe1cbbc60b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"6:51:48.830011 5790 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 16:51:48.830168 5790 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.830419 5790 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.830834 5790 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.831093 5790 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 16:51:48.831101 5790 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.831133 5790 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 16:51:48.831184 5790 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 16:51:48.831218 5790 factory.go:656] Stopping watch factory\\\\nI0929 16:51:48.831229 5790 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:50Z\\\",\\\"message\\\":\\\":0a:d9:00:04 10.217.0.4]} options:{GoMap:map[iface-id-ver:3b6479f0-333b-4a96-9adf-2099afdc2447 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61897e97-c771-4738-8709-09636387cb00}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:51:50.231985 6025 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/packageserver-service for network=default are: map[]\\\\nF0929 16:51:50.229762 6025 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to 
call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z]\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.412340 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.412380 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.412389 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.412403 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.412412 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:51Z","lastTransitionTime":"2025-09-29T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.483598 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/1.log" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.514339 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.514373 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.514382 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.514396 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.514405 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:51Z","lastTransitionTime":"2025-09-29T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.616976 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.617049 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.617074 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.617101 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.617122 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:51Z","lastTransitionTime":"2025-09-29T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.719564 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.719615 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.719624 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.719638 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.719647 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:51Z","lastTransitionTime":"2025-09-29T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.822489 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.822537 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.822549 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.822569 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.822583 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:51Z","lastTransitionTime":"2025-09-29T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.924728 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.924798 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.924819 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.924846 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:51 crc kubenswrapper[4592]: I0929 16:51:51.924869 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:51Z","lastTransitionTime":"2025-09-29T16:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.027994 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.028056 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.028083 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.028110 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.028131 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:52Z","lastTransitionTime":"2025-09-29T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.131139 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.131188 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.131199 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.131214 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.131223 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:52Z","lastTransitionTime":"2025-09-29T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.182056 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.182124 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.182124 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:52 crc kubenswrapper[4592]: E0929 16:51:52.182226 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:51:52 crc kubenswrapper[4592]: E0929 16:51:52.182301 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:51:52 crc kubenswrapper[4592]: E0929 16:51:52.182579 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.182832 4592 scope.go:117] "RemoveContainer" containerID="5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.233825 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.233873 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.233886 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.233902 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.233913 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:52Z","lastTransitionTime":"2025-09-29T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.340557 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.340806 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.340937 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.341040 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.341155 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:52Z","lastTransitionTime":"2025-09-29T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.443869 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.443914 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.443925 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.443944 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.443957 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:52Z","lastTransitionTime":"2025-09-29T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.490620 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.492416 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f"} Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.492783 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.506163 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.519025 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.529933 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.542135 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.545577 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.545623 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:52 crc 
kubenswrapper[4592]: I0929 16:51:52.545636 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.545655 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.545667 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:52Z","lastTransitionTime":"2025-09-29T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.555615 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.567037 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.579994 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.593191 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.609566 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb1
4133d095cf57fe1cbbc60b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"6:51:48.830011 5790 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 16:51:48.830168 5790 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.830419 5790 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.830834 5790 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.831093 5790 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 16:51:48.831101 5790 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.831133 5790 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 16:51:48.831184 5790 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 16:51:48.831218 5790 factory.go:656] Stopping watch factory\\\\nI0929 16:51:48.831229 5790 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:50Z\\\",\\\"message\\\":\\\":0a:d9:00:04 10.217.0.4]} options:{GoMap:map[iface-id-ver:3b6479f0-333b-4a96-9adf-2099afdc2447 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61897e97-c771-4738-8709-09636387cb00}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:51:50.231985 6025 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/packageserver-service for network=default are: map[]\\\\nF0929 16:51:50.229762 6025 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to 
call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z]\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.620580 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.631253 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.640904 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.647998 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.648044 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.648054 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.648069 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.648082 4592 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:52Z","lastTransitionTime":"2025-09-29T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.653271 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.664488 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.673734 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.683344 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:52Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.750422 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.750445 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.750453 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.750465 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.750473 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:52Z","lastTransitionTime":"2025-09-29T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.853304 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.853347 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.853356 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.853368 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.853376 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:52Z","lastTransitionTime":"2025-09-29T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.955734 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.955773 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.955784 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.955799 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:52 crc kubenswrapper[4592]: I0929 16:51:52.955810 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:52Z","lastTransitionTime":"2025-09-29T16:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.058174 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.058209 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.058220 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.058237 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.058254 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:53Z","lastTransitionTime":"2025-09-29T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.160618 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.160665 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.160675 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.160692 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.160703 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:53Z","lastTransitionTime":"2025-09-29T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.183048 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:53 crc kubenswrapper[4592]: E0929 16:51:53.183219 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.262665 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.262703 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.262711 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.262725 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.262734 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:53Z","lastTransitionTime":"2025-09-29T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.365092 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.365207 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.365218 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.365233 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.365250 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:53Z","lastTransitionTime":"2025-09-29T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.467504 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.467544 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.467556 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.467572 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.467583 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:53Z","lastTransitionTime":"2025-09-29T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.569449 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.569520 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.569539 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.569566 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.569587 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:53Z","lastTransitionTime":"2025-09-29T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.671597 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.671641 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.671652 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.671670 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.671681 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:53Z","lastTransitionTime":"2025-09-29T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.773997 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.774045 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.774055 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.774072 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.774085 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:53Z","lastTransitionTime":"2025-09-29T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.876135 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.876199 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.876210 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.876227 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.876240 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:53Z","lastTransitionTime":"2025-09-29T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.982231 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.982281 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.982294 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.982311 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:53 crc kubenswrapper[4592]: I0929 16:51:53.982323 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:53Z","lastTransitionTime":"2025-09-29T16:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.085410 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.085445 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.085454 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.085468 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.085477 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:54Z","lastTransitionTime":"2025-09-29T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.182371 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:51:54 crc kubenswrapper[4592]: E0929 16:51:54.182516 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.182894 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:51:54 crc kubenswrapper[4592]: E0929 16:51:54.182961 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.183016 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:51:54 crc kubenswrapper[4592]: E0929 16:51:54.183064 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.187549 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.187576 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.187587 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.187601 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.187611 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:54Z","lastTransitionTime":"2025-09-29T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.290358 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.290409 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.290419 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.290434 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.290456 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:54Z","lastTransitionTime":"2025-09-29T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.392856 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.393469 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.393490 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.393505 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.393517 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:54Z","lastTransitionTime":"2025-09-29T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.495559 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.495912 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.495936 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.496312 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.496365 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:54Z","lastTransitionTime":"2025-09-29T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.599616 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.599729 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.599748 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.599770 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.599783 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:54Z","lastTransitionTime":"2025-09-29T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.701925 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.701962 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.701970 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.701982 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.701991 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:54Z","lastTransitionTime":"2025-09-29T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.803975 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.804008 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.804017 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.804029 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.804040 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:54Z","lastTransitionTime":"2025-09-29T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.874725 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:54 crc kubenswrapper[4592]: E0929 16:51:54.874918 4592 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 16:51:54 crc kubenswrapper[4592]: E0929 16:51:54.874998 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs podName:484e63f2-7bae-4e57-ab79-95cba3bad285 nodeName:}" failed. No retries permitted until 2025-09-29 16:52:02.874979884 +0000 UTC m=+53.022757565 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs") pod "network-metrics-daemon-qvsjc" (UID: "484e63f2-7bae-4e57-ab79-95cba3bad285") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.906758 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.906807 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.906822 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.906843 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:54 crc kubenswrapper[4592]: I0929 16:51:54.906855 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:54Z","lastTransitionTime":"2025-09-29T16:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.009915 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.009960 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.009972 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.009989 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.010006 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:55Z","lastTransitionTime":"2025-09-29T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.112044 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.112112 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.112131 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.112187 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.112206 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:55Z","lastTransitionTime":"2025-09-29T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.182927 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:55 crc kubenswrapper[4592]: E0929 16:51:55.183463 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.214970 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.215028 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.215048 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.215073 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.215091 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:55Z","lastTransitionTime":"2025-09-29T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.318555 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.318798 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.318978 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.319186 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:55 crc kubenswrapper[4592]: I0929 16:51:55.319360 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:55Z","lastTransitionTime":"2025-09-29T16:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 16:51:56 crc kubenswrapper[4592]: I0929 16:51:56.039839 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:56 crc kubenswrapper[4592]: I0929 16:51:56.039876 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:56 crc kubenswrapper[4592]: I0929 16:51:56.039887 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:56 crc kubenswrapper[4592]: I0929 16:51:56.039901 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:56 crc kubenswrapper[4592]: I0929 16:51:56.039913 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:56Z","lastTransitionTime":"2025-09-29T16:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:56 crc kubenswrapper[4592]: I0929 16:51:56.182481 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:51:56 crc kubenswrapper[4592]: I0929 16:51:56.182528 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:51:56 crc kubenswrapper[4592]: I0929 16:51:56.182591 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:51:56 crc kubenswrapper[4592]: E0929 16:51:56.182609 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:51:56 crc kubenswrapper[4592]: E0929 16:51:56.182747 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:51:56 crc kubenswrapper[4592]: E0929 16:51:56.182864 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
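Each setters.go:603 entry above embeds the node's Ready condition as a JSON object. A minimal sketch of decoding that payload, using a local struct that mirrors only the fields visible in these lines (a stand-in for illustration, not the upstream k8s.io/api NodeCondition type; the message is abridged):

    // condition.go: decode the Ready condition JSON from a
    // "Node became not ready" entry.
    package main

    import (
        "encoding/json"
        "fmt"
        "time"
    )

    type nodeCondition struct {
        Type               string    `json:"type"`
        Status             string    `json:"status"`
        LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
        LastTransitionTime time.Time `json:"lastTransitionTime"`
        Reason             string    `json:"reason"`
        Message            string    `json:"message"`
    }

    func main() {
        // Payload copied (message shortened) from a setters.go:603 entry above.
        raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:56Z","lastTransitionTime":"2025-09-29T16:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
        var c nodeCondition
        if err := json.Unmarshal([]byte(raw), &c); err != nil {
            panic(err)
        }
        fmt.Printf("node Ready=%s since %s: %s (%s)\n",
            c.Status, c.LastTransitionTime.Format(time.RFC3339), c.Reason, c.Message)
    }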
Sep 29 16:51:57 crc kubenswrapper[4592]: I0929 16:51:57.066835 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:57 crc kubenswrapper[4592]: I0929 16:51:57.066896 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:57 crc kubenswrapper[4592]: I0929 16:51:57.066906 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:57 crc kubenswrapper[4592]: I0929 16:51:57.066922 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:57 crc kubenswrapper[4592]: I0929 16:51:57.066934 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:57Z","lastTransitionTime":"2025-09-29T16:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:57 crc kubenswrapper[4592]: I0929 16:51:57.182216 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:51:57 crc kubenswrapper[4592]: E0929 16:51:57.182331 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
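The pod_workers.go:1301 failures recur for the same handful of pods throughout this log. A hypothetical triage helper (the file name and the regular expression are assumptions based on the entry format seen here) that tallies "Error syncing pod" occurrences per pod:

    // syncerrs.go: count "Error syncing pod" entries per pod/podUID.
    package main

    import (
        "bufio"
        "fmt"
        "os"
        "regexp"
    )

    func main() {
        f, err := os.Open("kubelet.log") // assumed local copy of this log
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        defer f.Close()

        re := regexp.MustCompile(`"Error syncing pod, skipping".*pod="([^"]+)" podUID="([^"]+)"`)
        counts := map[string]int{}
        sc := bufio.NewScanner(f)
        sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // entries here are very long
        for sc.Scan() {
            if m := re.FindStringSubmatch(sc.Text()); m != nil {
                counts[m[1]+" ("+m[2]+")"]++
            }
        }
        for pod, n := range counts {
            fmt.Printf("%4d  %s\n", n, pod)
        }
    }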
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.091134 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.091196 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.091206 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.091226 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.091236 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.182447 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.182484 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.182548 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:51:58 crc kubenswrapper[4592]: E0929 16:51:58.182580 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:51:58 crc kubenswrapper[4592]: E0929 16:51:58.182685 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:51:58 crc kubenswrapper[4592]: E0929 16:51:58.182844 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.295214 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.295252 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.295260 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.295273 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.295283 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"}
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.354337 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.354376 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.354388 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.354404 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.354416 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:51:58 crc kubenswrapper[4592]: E0929 16:51:58.366001 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.369864 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.369948 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.369961 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.369976 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.369987 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.384103 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.384168 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.384180 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.384198 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.384211 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:58 crc kubenswrapper[4592]: E0929 16:51:58.395737 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.400176 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.400714 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
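Annotation: every node status patch in this excerpt is rejected by the same admission webhook. The kubelet reaches the network-node-identity webhook at 127.0.0.1:9743, but the webhook's serving certificate expired on 2025-08-24T17:21:41Z, more than a month before the log's current time of 2025-09-29T16:51:58Z. Below is a minimal Go sketch of how one might confirm this from the node; the address comes from the log, and InsecureSkipVerify is set only so the handshake completes and the expired leaf certificate can be inspected rather than rejected.

    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        // Dial the webhook endpoint named in the log. Skipping verification
        // lets the handshake finish so the expired certificate can be read.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            fmt.Println("dial:", err)
            return
        }
        defer conn.Close()
        leaf := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("notBefore=%s notAfter=%s expiredNow=%t\n",
            leaf.NotBefore.Format(time.RFC3339),
            leaf.NotAfter.Format(time.RFC3339),
            time.Now().After(leaf.NotAfter))
    }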
event="NodeHasNoDiskPressure" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.400732 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.400751 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.400764 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:58 crc kubenswrapper[4592]: E0929 16:51:58.411992 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.415571 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.415601 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
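Annotation: the NodeNotReady condition repeated by setters.go in this cycle is a separate problem from the webhook rejection. The container runtime reports NetworkReady=false because /etc/kubernetes/cni/net.d/ contains no CNI configuration file, so the node cannot become Ready regardless of whether the status patch succeeds. A small Go sketch of the directory check implied by that message follows; the directory comes from the log, while the accepted extensions (.conf, .conflist, .json) are the conventional CNI config extensions and are an assumption here, not something the log states.

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        // Directory named in the log's NetworkReady=false message.
        const netDir = "/etc/kubernetes/cni/net.d/"
        entries, err := os.ReadDir(netDir)
        if err != nil {
            fmt.Println("read dir:", err)
            return
        }
        found := 0
        for _, e := range entries {
            // Conventional CNI config extensions (assumption, see note above).
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                fmt.Println("found CNI config:", filepath.Join(netDir, e.Name()))
                found++
            }
        }
        if found == 0 {
            fmt.Println("no CNI configuration file in", netDir)
        }
    }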
event="NodeHasNoDiskPressure" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.415611 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.415626 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.415635 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:58 crc kubenswrapper[4592]: E0929 16:51:58.426460 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: E0929 16:51:58.426572 4592 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.427797 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
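Annotation: the "Unable to update node status" line closes out one update cycle. The kubelet attempts the status patch a fixed number of times per cycle (upstream kubelet uses nodeStatusUpdateRetry = 5) and then gives up until the next sync, which is why the excerpt shows a run of "will retry" errors, some preceding this excerpt, followed by a single "exceeds retry count" error. A Go sketch of that loop shape, illustrative rather than the kubelet's actual code:

    package main

    import (
        "errors"
        "fmt"
    )

    // Retry budget per update cycle; 5 matches the upstream kubelet constant.
    const nodeStatusUpdateRetry = 5

    func updateNodeStatus(tryPatch func() error) error {
        for i := 0; i < nodeStatusUpdateRetry; i++ {
            if err := tryPatch(); err != nil {
                fmt.Println("Error updating node status, will retry:", err)
                continue
            }
            return nil
        }
        return errors.New("update node status exceeds retry count")
    }

    func main() {
        // With the webhook rejecting every patch, all attempts fail.
        webhookDown := errors.New("x509: certificate has expired or is not yet valid")
        fmt.Println(updateNodeStatus(func() error { return webhookDown }))
    }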
event="NodeHasSufficientMemory" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.427822 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.427832 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.427845 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.427855 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.504840 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.516215 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.519694 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.530752 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.530806 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.530824 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.530913 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.530932 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.538075 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri
-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.548460 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.558650 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/en
v\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.569063 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.580190 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.592718 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.609274 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.625711 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.632845 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.632922 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.632938 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.632955 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.632995 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.641252 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.660027 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.674764 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.688520 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.701959 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.715536 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.734364 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb1
4133d095cf57fe1cbbc60b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"6:51:48.830011 5790 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 16:51:48.830168 5790 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.830419 5790 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.830834 5790 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.831093 5790 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 16:51:48.831101 5790 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.831133 5790 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 16:51:48.831184 5790 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 16:51:48.831218 5790 factory.go:656] Stopping watch factory\\\\nI0929 16:51:48.831229 5790 handler.go:208] Removed *v1.EgressIP ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:50Z\\\",\\\"message\\\":\\\":0a:d9:00:04 10.217.0.4]} options:{GoMap:map[iface-id-ver:3b6479f0-333b-4a96-9adf-2099afdc2447 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61897e97-c771-4738-8709-09636387cb00}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:51:50.231985 6025 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/packageserver-service for network=default are: map[]\\\\nF0929 16:51:50.229762 6025 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to 
call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z]\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.1
1\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:58Z is after 2025-08-24T17:21:41Z" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.735239 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.735269 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.735278 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.735291 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.735300 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.838407 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.838469 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.838481 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.838497 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.838509 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.940729 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.940794 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.940806 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.940824 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:58 crc kubenswrapper[4592]: I0929 16:51:58.940834 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:58Z","lastTransitionTime":"2025-09-29T16:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.046929 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.046981 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.046994 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.047012 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.047036 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:59Z","lastTransitionTime":"2025-09-29T16:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.149083 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.149120 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.149128 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.149157 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.149166 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:59Z","lastTransitionTime":"2025-09-29T16:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.182732 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:51:59 crc kubenswrapper[4592]: E0929 16:51:59.183183 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.251361 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.251608 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.251710 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.251813 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.251881 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:59Z","lastTransitionTime":"2025-09-29T16:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.353526 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.353758 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.353878 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.353996 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.354109 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:59Z","lastTransitionTime":"2025-09-29T16:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.456206 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.456245 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.456253 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.456270 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.456279 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:59Z","lastTransitionTime":"2025-09-29T16:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.558845 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.558895 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.558908 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.558924 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.558938 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:59Z","lastTransitionTime":"2025-09-29T16:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.661612 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.661659 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.661669 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.661683 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.661696 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:59Z","lastTransitionTime":"2025-09-29T16:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.764870 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.764916 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.764927 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.764939 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.764948 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:59Z","lastTransitionTime":"2025-09-29T16:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.867503 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.867549 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.867561 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.867578 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.867590 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:59Z","lastTransitionTime":"2025-09-29T16:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.970200 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.970262 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.970283 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.970306 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:51:59 crc kubenswrapper[4592]: I0929 16:51:59.970324 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:51:59Z","lastTransitionTime":"2025-09-29T16:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.072767 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.072804 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.072813 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.072826 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.072835 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:00Z","lastTransitionTime":"2025-09-29T16:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.175436 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.175483 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.175494 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.175510 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.175520 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:00Z","lastTransitionTime":"2025-09-29T16:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.182899 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.182924 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.182960 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:00 crc kubenswrapper[4592]: E0929 16:52:00.183021 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:00 crc kubenswrapper[4592]: E0929 16:52:00.183188 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:00 crc kubenswrapper[4592]: E0929 16:52:00.183285 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.278014 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.278089 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.278108 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.278841 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.278942 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:00Z","lastTransitionTime":"2025-09-29T16:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.381629 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.381696 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.381720 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.381794 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.381821 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:00Z","lastTransitionTime":"2025-09-29T16:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.485972 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.486028 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.486043 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.486066 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.486082 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:00Z","lastTransitionTime":"2025-09-29T16:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.588457 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.588508 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.588523 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.588549 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.588564 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:00Z","lastTransitionTime":"2025-09-29T16:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.691743 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.691794 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.691804 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.691826 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.691842 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:00Z","lastTransitionTime":"2025-09-29T16:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.793996 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.794056 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.794073 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.794096 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.794115 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:00Z","lastTransitionTime":"2025-09-29T16:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.896659 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.896729 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.896742 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.896758 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.896770 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:00Z","lastTransitionTime":"2025-09-29T16:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.999861 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.999902 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.999913 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.999930 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:00 crc kubenswrapper[4592]: I0929 16:52:00.999940 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:00Z","lastTransitionTime":"2025-09-29T16:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.102784 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.102838 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.102856 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.102878 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.102895 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:01Z","lastTransitionTime":"2025-09-29T16:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.182877 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:01 crc kubenswrapper[4592]: E0929 16:52:01.183014 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.184728 4592 scope.go:117] "RemoveContainer" containerID="6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.198946 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.205915 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.205969 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.205983 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.206005 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.206020 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:01Z","lastTransitionTime":"2025-09-29T16:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.220177 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.238538 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.249472 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.259981 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 
16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.269930 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.279072 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.291736 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.306586 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.308342 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.308377 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.308389 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.308405 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.308416 4592 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:01Z","lastTransitionTime":"2025-09-29T16:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.320322 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.332893 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.345105 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.361922 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.372067 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.382000 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.393951 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.411273 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.411314 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.411323 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.411337 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.411347 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:01Z","lastTransitionTime":"2025-09-29T16:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.414276 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39fa761549f122ec72354878a5267d6fcc380befe89cadb7f139603eaa6e8cda\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:48Z\\\",\\\"message\\\":\\\"6:51:48.830011 5790 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0929 16:51:48.830168 5790 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.830419 5790 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.830834 5790 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.831093 5790 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 16:51:48.831101 5790 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0929 16:51:48.831133 5790 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0929 16:51:48.831184 5790 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0929 16:51:48.831218 5790 factory.go:656] Stopping watch factory\\\\nI0929 16:51:48.831229 5790 handler.go:208] Removed *v1.EgressIP 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:50Z\\\",\\\"message\\\":\\\":0a:d9:00:04 10.217.0.4]} options:{GoMap:map[iface-id-ver:3b6479f0-333b-4a96-9adf-2099afdc2447 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61897e97-c771-4738-8709-09636387cb00}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:51:50.231985 6025 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/packageserver-service for network=default are: map[]\\\\nF0929 16:51:50.229762 6025 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 
2025-08-24T17:21:41Z]\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.429031 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.442054 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.456376 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.474855 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.492084 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:50Z\\\",\\\"message\\\":\\\":0a:d9:00:04 10.217.0.4]} options:{GoMap:map[iface-id-ver:3b6479f0-333b-4a96-9adf-2099afdc2447 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61897e97-c771-4738-8709-09636387cb00}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:51:50.231985 6025 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/packageserver-service for network=default are: map[]\\\\nF0929 16:51:50.229762 6025 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z]\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.502526 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.513613 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.513658 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.513676 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.513692 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.513703 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:01Z","lastTransitionTime":"2025-09-29T16:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.515506 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.527449 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.540397 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.550504 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount
\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.569891 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.580043 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.606758 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.616364 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.616395 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.616406 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.616421 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.616433 4592 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:01Z","lastTransitionTime":"2025-09-29T16:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.620619 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.630236 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.639219 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.648865 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:01Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.718765 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.718796 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.718807 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.718820 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.718830 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:01Z","lastTransitionTime":"2025-09-29T16:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.821475 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.821502 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.821511 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.821524 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.821534 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:01Z","lastTransitionTime":"2025-09-29T16:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.924058 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.924123 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.924139 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.924212 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:01 crc kubenswrapper[4592]: I0929 16:52:01.924232 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:01Z","lastTransitionTime":"2025-09-29T16:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.027182 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.027226 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.027237 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.027252 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.027264 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:02Z","lastTransitionTime":"2025-09-29T16:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.129719 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.129771 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.129782 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.129802 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.129812 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:02Z","lastTransitionTime":"2025-09-29T16:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.182770 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.182810 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.182823 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:02 crc kubenswrapper[4592]: E0929 16:52:02.182891 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:02 crc kubenswrapper[4592]: E0929 16:52:02.182993 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:02 crc kubenswrapper[4592]: E0929 16:52:02.183083 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.232451 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.232503 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.232514 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.232533 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.232545 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:02Z","lastTransitionTime":"2025-09-29T16:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.335113 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.335159 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.335168 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.335180 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.335189 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:02Z","lastTransitionTime":"2025-09-29T16:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.437679 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.437717 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.437726 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.437740 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.437749 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:02Z","lastTransitionTime":"2025-09-29T16:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.524885 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/1.log" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.527978 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerStarted","Data":"d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7"} Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.528582 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.539824 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.539866 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.539877 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.539893 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.539904 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:02Z","lastTransitionTime":"2025-09-29T16:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.542261 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.557775 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.570286 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.581344 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.592233 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.602657 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.614342 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.625430 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.639318 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.641971 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.642018 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:02 crc 
kubenswrapper[4592]: I0929 16:52:02.642034 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.642055 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.642071 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:02Z","lastTransitionTime":"2025-09-29T16:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.653913 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.665575 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.676448 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.690106 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.707287 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:50Z\\\",\\\"message\\\":\\\":0a:d9:00:04 10.217.0.4]} options:{GoMap:map[iface-id-ver:3b6479f0-333b-4a96-9adf-2099afdc2447 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61897e97-c771-4738-8709-09636387cb00}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:51:50.231985 6025 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/packageserver-service for network=default are: map[]\\\\nF0929 16:51:50.229762 6025 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 
2025-08-24T17:21:41Z]\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initCont
ainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.717781 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.730612 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.748428 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.748520 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.748537 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.748556 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.748572 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:02Z","lastTransitionTime":"2025-09-29T16:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.763866 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:02Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.850509 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.850551 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.850562 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.850578 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.850589 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:02Z","lastTransitionTime":"2025-09-29T16:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.953080 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.953108 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.953117 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.953128 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.953137 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:02Z","lastTransitionTime":"2025-09-29T16:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:02 crc kubenswrapper[4592]: I0929 16:52:02.962282 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:02 crc kubenswrapper[4592]: E0929 16:52:02.962440 4592 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 16:52:02 crc kubenswrapper[4592]: E0929 16:52:02.962496 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs podName:484e63f2-7bae-4e57-ab79-95cba3bad285 nodeName:}" failed. No retries permitted until 2025-09-29 16:52:18.962480816 +0000 UTC m=+69.110258497 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs") pod "network-metrics-daemon-qvsjc" (UID: "484e63f2-7bae-4e57-ab79-95cba3bad285") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.056417 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.056458 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.056468 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.056505 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.056516 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:03Z","lastTransitionTime":"2025-09-29T16:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.158781 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.158825 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.158833 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.158846 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.158855 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:03Z","lastTransitionTime":"2025-09-29T16:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.182622 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.182827 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.261353 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.261399 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.261413 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.261430 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.261442 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:03Z","lastTransitionTime":"2025-09-29T16:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.364533 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.364580 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.364593 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.364610 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.364620 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:03Z","lastTransitionTime":"2025-09-29T16:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.468629 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.468686 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.468721 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.468748 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.468767 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:03Z","lastTransitionTime":"2025-09-29T16:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.536863 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/2.log" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.537972 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/1.log" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.541382 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7" exitCode=1 Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.541435 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7"} Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.541481 4592 scope.go:117] "RemoveContainer" containerID="6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.542625 4592 scope.go:117] "RemoveContainer" containerID="d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7" Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.542858 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\"" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.565688 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.571168 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.571238 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.571251 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.571268 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.571280 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:03Z","lastTransitionTime":"2025-09-29T16:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.581323 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.597070 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.617473 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6393f276d7d8a7e44f928e42f9c623b2f67f9cb14133d095cf57fe1cbbc60b39\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:51:50Z\\\",\\\"message\\\":\\\":0a:d9:00:04 10.217.0.4]} options:{GoMap:map[iface-id-ver:3b6479f0-333b-4a96-9adf-2099afdc2447 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61897e97-c771-4738-8709-09636387cb00}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:51:50.231985 6025 lb_config.go:1031] Cluster endpoints for openshift-operator-lifecycle-manager/packageserver-service for network=default are: map[]\\\\nF0929 16:51:50.229762 6025 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:51:50Z is after 2025-08-24T17:21:41Z]\\\\nI09\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:02Z\\\",\\\"message\\\":\\\"411 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group 
Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438329 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438446 6190 kube.go:317] Updating pod openshift-multus/network-metrics-daemon-qvsjc\\\\nI0929 16:52:02.438482 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438501 6190 model_client.go:382] Update operations generated as: [{Op:update Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}
]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.637334 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.654399 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.668551 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.674001 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.674025 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.674033 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.674047 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.674055 4592 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:03Z","lastTransitionTime":"2025-09-29T16:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.680235 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.690848 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",
\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.703326 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.717651 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.731091 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.741833 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.757611 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.770253 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.776362 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.776394 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.776403 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.776417 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.776426 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:03Z","lastTransitionTime":"2025-09-29T16:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.784573 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.799014 4592 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:03Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.871663 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.871784 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:03 crc 
kubenswrapper[4592]: I0929 16:52:03.871818 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.871844 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.871882 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872032 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872059 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872071 4592 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872120 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 16:52:35.87210509 +0000 UTC m=+86.019882771 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872408 4592 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872444 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872480 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872503 4592 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872484 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:52:35.872469292 +0000 UTC m=+86.020246973 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872566 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 16:52:35.872560004 +0000 UTC m=+86.020337685 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872577 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:52:35.872571725 +0000 UTC m=+86.020349406 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872596 4592 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:52:03 crc kubenswrapper[4592]: E0929 16:52:03.872644 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:52:35.872626776 +0000 UTC m=+86.020404487 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.878198 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.878239 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.878261 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.878286 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.878340 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:03Z","lastTransitionTime":"2025-09-29T16:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.983228 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.983258 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.983268 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.983284 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:03 crc kubenswrapper[4592]: I0929 16:52:03.983294 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:03Z","lastTransitionTime":"2025-09-29T16:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.085844 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.085960 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.085987 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.086016 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.086036 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:04Z","lastTransitionTime":"2025-09-29T16:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.182358 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.182377 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.182488 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:04 crc kubenswrapper[4592]: E0929 16:52:04.182647 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:04 crc kubenswrapper[4592]: E0929 16:52:04.182730 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:04 crc kubenswrapper[4592]: E0929 16:52:04.182792 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.188034 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.188086 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.188104 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.188127 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.188180 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:04Z","lastTransitionTime":"2025-09-29T16:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.290726 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.290769 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.290778 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.290791 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.290800 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:04Z","lastTransitionTime":"2025-09-29T16:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.393542 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.393587 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.393600 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.393618 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.393629 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:04Z","lastTransitionTime":"2025-09-29T16:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.496592 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.496644 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.496653 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.496670 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.496682 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:04Z","lastTransitionTime":"2025-09-29T16:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.546726 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/2.log" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.550004 4592 scope.go:117] "RemoveContainer" containerID="d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7" Sep 29 16:52:04 crc kubenswrapper[4592]: E0929 16:52:04.550241 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\"" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.565018 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.578665 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.593872 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.602047 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.602089 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.602100 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.602115 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.602129 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:04Z","lastTransitionTime":"2025-09-29T16:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.617219 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.628831 4592 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.639846 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.650049 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.662297 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.680122 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e
196b09d19685b98b2139dcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:02Z\\\",\\\"message\\\":\\\"411 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438329 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438446 6190 kube.go:317] Updating pod openshift-multus/network-metrics-daemon-qvsjc\\\\nI0929 16:52:02.438482 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438501 6190 model_client.go:382] Update operations generated as: [{Op:update Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.691187 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.705190 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.705232 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.705243 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.705256 4592 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.705265 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:04Z","lastTransitionTime":"2025-09-29T16:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.705298 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.717509 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.727648 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.738493 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 
16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.750488 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.760794 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.770445 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:04Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.807419 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.807456 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.807467 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.807483 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.807494 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:04Z","lastTransitionTime":"2025-09-29T16:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.910995 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.911039 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.911051 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.911066 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:04 crc kubenswrapper[4592]: I0929 16:52:04.911077 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:04Z","lastTransitionTime":"2025-09-29T16:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.012960 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.012991 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.012999 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.013014 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.013023 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:05Z","lastTransitionTime":"2025-09-29T16:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.116387 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.116474 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.116497 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.116524 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.116546 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:05Z","lastTransitionTime":"2025-09-29T16:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.182991 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:05 crc kubenswrapper[4592]: E0929 16:52:05.183120 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.218457 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.218513 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.218528 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.218550 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.218565 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:05Z","lastTransitionTime":"2025-09-29T16:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.321950 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.322015 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.322038 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.322057 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.322069 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:05Z","lastTransitionTime":"2025-09-29T16:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.425055 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.425098 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.425108 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.425132 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:05 crc kubenswrapper[4592]: I0929 16:52:05.425141 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:05Z","lastTransitionTime":"2025-09-29T16:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:06 crc kubenswrapper[4592]: I0929 16:52:06.182586 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:52:06 crc kubenswrapper[4592]: I0929 16:52:06.182629 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:52:06 crc kubenswrapper[4592]: I0929 16:52:06.182588 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:52:06 crc kubenswrapper[4592]: E0929 16:52:06.182714 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:52:06 crc kubenswrapper[4592]: E0929 16:52:06.182815 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:52:06 crc kubenswrapper[4592]: E0929 16:52:06.182894 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:52:07 crc kubenswrapper[4592]: I0929 16:52:07.182030 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:07 crc kubenswrapper[4592]: E0929 16:52:07.182879 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.182438 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.182478 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.182604 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:52:08 crc kubenswrapper[4592]: E0929 16:52:08.182747 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:52:08 crc kubenswrapper[4592]: E0929 16:52:08.182801 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:52:08 crc kubenswrapper[4592]: E0929 16:52:08.182863 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.506757 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.506821 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.506851 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.506904 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.506930 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:08Z","lastTransitionTime":"2025-09-29T16:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:08 crc kubenswrapper[4592]: E0929 16:52:08.531656 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:08Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.537564 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.537634 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.537659 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.537687 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.537709 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:08Z","lastTransitionTime":"2025-09-29T16:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:08 crc kubenswrapper[4592]: E0929 16:52:08.556951 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:08Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.567382 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.567436 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.567450 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.567471 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.567487 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:08Z","lastTransitionTime":"2025-09-29T16:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:08 crc kubenswrapper[4592]: E0929 16:52:08.580765 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...}\" [status patch payload identical to the attempt above; elided] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:08Z is after 2025-08-24T17:21:41Z"
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.584249 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.584317 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.584332 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.584349 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.584363 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:08Z","lastTransitionTime":"2025-09-29T16:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:08 crc kubenswrapper[4592]: E0929 16:52:08.600977 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...}\" [identical status patch payload elided] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:08Z is after 2025-08-24T17:21:41Z"
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.604585 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.604624 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.604635 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.604670 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.604682 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:08Z","lastTransitionTime":"2025-09-29T16:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:08 crc kubenswrapper[4592]: E0929 16:52:08.616614 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...}\" [identical status patch payload elided] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:08Z is after 2025-08-24T17:21:41Z"
Sep 29 16:52:08 crc kubenswrapper[4592]: E0929 16:52:08.616736 4592 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.618085 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
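The repeated retries above all fail in the same place: each node-status PATCH is rejected because the API server cannot call the node.network-node-identity.openshift.io validating webhook at https://127.0.0.1:9743, whose serving certificate expired at 2025-08-24T17:21:41Z, more than a month before the node's clock reading of 2025-09-29. A minimal Go sketch, assuming it is run on the node itself, for confirming the expiry (the address is taken from the log line above; InsecureSkipVerify is deliberate so the handshake completes even with an expired certificate and its dates can be read):

// certcheck.go: print the validity window of the certificate served at a
// TLS endpoint; a diagnostic sketch for the x509 errors logged above.
package main

import (
	"crypto/tls"
	"flag"
	"fmt"
	"log"
	"time"
)

func main() {
	// Default is the webhook endpoint named in the kubelet errors above.
	addr := flag.String("addr", "127.0.0.1:9743", "host:port of the TLS endpoint to probe")
	flag.Parse()

	// Skip chain verification on purpose: the point is to complete the
	// handshake even with an expired certificate and then inspect it.
	conn, err := tls.Dial("tcp", *addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("handshake with %s failed: %v", *addr, err)
	}
	defer conn.Close()

	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		log.Fatal("no peer certificate presented")
	}
	cert := state.PeerCertificates[0]
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.UTC().Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		fmt.Println("certificate is EXPIRED, matching the kubelet's x509 error")
	}
}

On a CRC cluster the internal certificates are normally rotated automatically a few minutes after the VM starts, so this pattern usually clears on its own; until rotation completes, every status update attempt keeps failing with the same x509 error.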
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.618108 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.618116 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.618129 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.618160 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:08Z","lastTransitionTime":"2025-09-29T16:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.720507 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.720545 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.720562 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.720577 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.720625 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:08Z","lastTransitionTime":"2025-09-29T16:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.823801 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.824061 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.824241 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.824401 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.824497 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:08Z","lastTransitionTime":"2025-09-29T16:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
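Separately from the webhook failure, the Ready=False condition repeated in every heartbeat names its own cause: /etc/kubernetes/cni/net.d/ holds no CNI configuration yet, so the runtime reports NetworkReady=false until the cluster's network plugin writes one. A small Go sketch of the corresponding check, assuming the directory path from the log message and an extension list modeled on common CNI config loaders rather than the kubelet's exact matching rules:

// cnicheck.go: report whether any CNI network config is present in the
// directory named by the kubelet's NetworkPluginNotReady message.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path taken from the log message above
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", dir, err)
		os.Exit(1)
	}
	found := 0
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		// Candidate config extensions; an assumption based on common CNI
		// loaders rather than the kubelet's own implementation.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Printf("found CNI config: %s\n", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration file found; the node stays NotReady until the network plugin writes one")
	}
}

On OpenShift this file normally appears once the OVN-Kubernetes pods start; the pod-sync errors below for network-metrics-daemon, network-check-source, network-check-target, and networking-console-plugin are the same missing-CNI condition reported by the pod workers.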
Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.927990 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.928446 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.928847 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.929291 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:08 crc kubenswrapper[4592]: I0929 16:52:08.929546 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:08Z","lastTransitionTime":"2025-09-29T16:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.032671 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.032709 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.032718 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.032732 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.032741 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:09Z","lastTransitionTime":"2025-09-29T16:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.135495 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.135540 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.135551 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.135567 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.135581 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:09Z","lastTransitionTime":"2025-09-29T16:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.182514 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:09 crc kubenswrapper[4592]: E0929 16:52:09.182693 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.238567 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.238610 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.238621 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.238637 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.238648 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:09Z","lastTransitionTime":"2025-09-29T16:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.341523 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.341562 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.341578 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.341599 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.341616 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:09Z","lastTransitionTime":"2025-09-29T16:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.444120 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.444442 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.444516 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.444575 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.444635 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:09Z","lastTransitionTime":"2025-09-29T16:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.547132 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.547461 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.547646 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.547885 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.548063 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:09Z","lastTransitionTime":"2025-09-29T16:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.651870 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.651919 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.651928 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.651946 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.651957 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:09Z","lastTransitionTime":"2025-09-29T16:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.755038 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.755098 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.755112 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.755133 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.755166 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:09Z","lastTransitionTime":"2025-09-29T16:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.857549 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.857654 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.857667 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.857690 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.857706 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:09Z","lastTransitionTime":"2025-09-29T16:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.960695 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.961136 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.961243 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.961365 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:09 crc kubenswrapper[4592]: I0929 16:52:09.961430 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:09Z","lastTransitionTime":"2025-09-29T16:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.065269 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.065315 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.065325 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.065344 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.065357 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:10Z","lastTransitionTime":"2025-09-29T16:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.167958 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.167999 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.168007 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.168021 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.168035 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:10Z","lastTransitionTime":"2025-09-29T16:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.182245 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.182279 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.182245 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:10 crc kubenswrapper[4592]: E0929 16:52:10.182396 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:10 crc kubenswrapper[4592]: E0929 16:52:10.182512 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:10 crc kubenswrapper[4592]: E0929 16:52:10.182586 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.270335 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.270401 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.270421 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.270452 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.270473 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:10Z","lastTransitionTime":"2025-09-29T16:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.373301 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.373342 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.373354 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.373370 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.373381 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:10Z","lastTransitionTime":"2025-09-29T16:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.476131 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.476199 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.476211 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.476229 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.476242 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:10Z","lastTransitionTime":"2025-09-29T16:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.578546 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.578808 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.578934 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.579067 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.579170 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:10Z","lastTransitionTime":"2025-09-29T16:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.681866 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.682163 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.682288 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.682450 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.682581 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:10Z","lastTransitionTime":"2025-09-29T16:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.748970 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.759767 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.770242 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.781731 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.784883 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.784931 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.784939 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.784952 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.784961 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:10Z","lastTransitionTime":"2025-09-29T16:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.794813 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.811360 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:02Z\\\",\\\"message\\\":\\\"411 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438329 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438446 6190 kube.go:317] Updating pod openshift-multus/network-metrics-daemon-qvsjc\\\\nI0929 16:52:02.438482 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438501 6190 model_client.go:382] Update operations generated as: [{Op:update Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.833029 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.843920 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.868046 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.881912 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.886913 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.886969 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.886980 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.886994 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.887005 4592 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:10Z","lastTransitionTime":"2025-09-29T16:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.893746 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.907850 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.921084 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.936748 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.952130 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.971786 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.986825 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:10Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.989606 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.989681 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.989697 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.989717 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:10 crc kubenswrapper[4592]: I0929 16:52:10.989730 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:10Z","lastTransitionTime":"2025-09-29T16:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.008008 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.092201 4592 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.092240 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.092252 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.092268 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.092278 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:11Z","lastTransitionTime":"2025-09-29T16:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.183020 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:11 crc kubenswrapper[4592]: E0929 16:52:11.183188 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.195581 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.196677 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.196703 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.196713 4592 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.196727 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.197440 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:11Z","lastTransitionTime":"2025-09-29T16:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.213857 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e
196b09d19685b98b2139dcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:02Z\\\",\\\"message\\\":\\\"411 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438329 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438446 6190 kube.go:317] Updating pod openshift-multus/network-metrics-daemon-qvsjc\\\\nI0929 16:52:02.438482 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438501 6190 model_client.go:382] Update operations generated as: [{Op:update Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.230338 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.244710 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.258038 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.268864 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":
true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.279519 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"star
ted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.292193 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.299430 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.299455 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.299463 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.299475 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.299485 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:11Z","lastTransitionTime":"2025-09-29T16:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.305615 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.317798 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.329246 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.343309 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.357344 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.379457 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.396460 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.401890 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.402164 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.402268 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.402350 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.402452 4592 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:11Z","lastTransitionTime":"2025-09-29T16:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.408862 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.421363 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:11Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.503956 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.504034 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.504045 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.504061 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.504072 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:11Z","lastTransitionTime":"2025-09-29T16:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.606680 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.606717 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.606727 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.606741 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.606752 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:11Z","lastTransitionTime":"2025-09-29T16:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.709372 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.709437 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.709456 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.709479 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.709498 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:11Z","lastTransitionTime":"2025-09-29T16:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.812432 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.812487 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.812500 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.812518 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.812531 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:11Z","lastTransitionTime":"2025-09-29T16:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.915655 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.915701 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.915716 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.915737 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:11 crc kubenswrapper[4592]: I0929 16:52:11.915751 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:11Z","lastTransitionTime":"2025-09-29T16:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.017740 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.017767 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.017774 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.017786 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.017794 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:12Z","lastTransitionTime":"2025-09-29T16:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.120559 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.120592 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.120602 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.120618 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.120629 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:12Z","lastTransitionTime":"2025-09-29T16:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.182464 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:12 crc kubenswrapper[4592]: E0929 16:52:12.182774 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.182590 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:12 crc kubenswrapper[4592]: E0929 16:52:12.182996 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.182491 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:12 crc kubenswrapper[4592]: E0929 16:52:12.183203 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.223591 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.223635 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.223646 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.223664 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.223679 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:12Z","lastTransitionTime":"2025-09-29T16:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.325775 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.325811 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.325820 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.325832 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.325840 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:12Z","lastTransitionTime":"2025-09-29T16:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.428482 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.428761 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.428826 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.428899 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.428981 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:12Z","lastTransitionTime":"2025-09-29T16:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.531406 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.531451 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.531465 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.531482 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.531495 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:12Z","lastTransitionTime":"2025-09-29T16:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.633992 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.634029 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.634039 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.634054 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.634066 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:12Z","lastTransitionTime":"2025-09-29T16:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.735969 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.736009 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.736020 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.736035 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.736046 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:12Z","lastTransitionTime":"2025-09-29T16:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.837960 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.838009 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.838020 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.838039 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.838052 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:12Z","lastTransitionTime":"2025-09-29T16:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.939908 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.939938 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.939945 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.939957 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:12 crc kubenswrapper[4592]: I0929 16:52:12.939969 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:12Z","lastTransitionTime":"2025-09-29T16:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.042457 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.042603 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.042622 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.042639 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.042653 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:13Z","lastTransitionTime":"2025-09-29T16:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.145225 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.145262 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.145272 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.145284 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.145292 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:13Z","lastTransitionTime":"2025-09-29T16:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.182565 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:13 crc kubenswrapper[4592]: E0929 16:52:13.182720 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.247894 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.247960 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.247973 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.247991 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.248004 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:13Z","lastTransitionTime":"2025-09-29T16:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.350842 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.350881 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.350891 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.350905 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.350919 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:13Z","lastTransitionTime":"2025-09-29T16:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.453295 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.453356 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.453371 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.453396 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.453411 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:13Z","lastTransitionTime":"2025-09-29T16:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.555521 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.555562 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.555574 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.555591 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.555619 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:13Z","lastTransitionTime":"2025-09-29T16:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.657795 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.657830 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.657840 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.657853 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.657862 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:13Z","lastTransitionTime":"2025-09-29T16:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.759792 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.759824 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.759832 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.759863 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.759871 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:13Z","lastTransitionTime":"2025-09-29T16:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.862412 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.862443 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.862469 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.862484 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.862493 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:13Z","lastTransitionTime":"2025-09-29T16:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.964481 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.964702 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.964765 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.964826 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:13 crc kubenswrapper[4592]: I0929 16:52:13.964920 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:13Z","lastTransitionTime":"2025-09-29T16:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.066742 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.066779 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.066792 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.066806 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.066817 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:14Z","lastTransitionTime":"2025-09-29T16:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.168474 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.168504 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.168512 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.168526 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.168534 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:14Z","lastTransitionTime":"2025-09-29T16:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.182185 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.182217 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.182242 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:14 crc kubenswrapper[4592]: E0929 16:52:14.182478 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:14 crc kubenswrapper[4592]: E0929 16:52:14.182793 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:14 crc kubenswrapper[4592]: E0929 16:52:14.183102 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.271594 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.271649 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.271660 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.271678 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.271688 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:14Z","lastTransitionTime":"2025-09-29T16:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.374221 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.374257 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.374266 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.374281 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.374293 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:14Z","lastTransitionTime":"2025-09-29T16:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.476790 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.476830 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.476841 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.476858 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.476868 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:14Z","lastTransitionTime":"2025-09-29T16:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.585077 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.585161 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.585171 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.585187 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.585197 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:14Z","lastTransitionTime":"2025-09-29T16:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.687383 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.687651 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.687772 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.687894 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.688069 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:14Z","lastTransitionTime":"2025-09-29T16:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.790525 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.790824 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.790917 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.791012 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.791122 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:14Z","lastTransitionTime":"2025-09-29T16:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.893863 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.894193 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.894294 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.894358 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.894432 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:14Z","lastTransitionTime":"2025-09-29T16:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.996961 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.997270 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.997350 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.997431 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:14 crc kubenswrapper[4592]: I0929 16:52:14.997506 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:14Z","lastTransitionTime":"2025-09-29T16:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.100271 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.100521 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.100640 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.100826 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.100979 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:15Z","lastTransitionTime":"2025-09-29T16:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.182120 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:15 crc kubenswrapper[4592]: E0929 16:52:15.182508 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.203334 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.203559 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.203614 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.203674 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.203730 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:15Z","lastTransitionTime":"2025-09-29T16:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.306102 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.306157 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.306166 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.306180 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:15 crc kubenswrapper[4592]: I0929 16:52:15.306191 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:15Z","lastTransitionTime":"2025-09-29T16:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the preceding five-message status block repeats verbatim, timestamps aside, at 16:52:15.408, 16:52:15.510, 16:52:15.613, 16:52:15.716, 16:52:15.818, 16:52:15.922, 16:52:16.024, and 16:52:16.127]
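
The block above is the kubelet's periodic status sync: each tick re-records the four node events and resets the Ready condition to False. Below is a minimal client-go sketch (not from this log) that reads the same condition back from the API server; the node name "crc" comes from the log, while the kubeconfig path and the use of client-go at all are assumptions for illustration:

    // A hypothetical nodeready.go; "crc" is the node name from this log.
    package main

    import (
        "context"
        "fmt"
        "os"
        "path/filepath"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Assumed kubeconfig location; not something this log states.
        kubeconfig := filepath.Join(os.Getenv("HOME"), ".kube", "config")
        cfg, err := clientcmd.BuildConfigFromFlags("", kubeconfig)
        if err != nil {
            panic(err)
        }
        client, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        node, err := client.CoreV1().Nodes().Get(context.TODO(), "crc", metav1.GetOptions{})
        if err != nil {
            panic(err)
        }
        // Print the same Ready condition the kubelet keeps resetting above.
        for _, c := range node.Status.Conditions {
            if c.Type == corev1.NodeReady {
                fmt.Printf("Ready=%s reason=%s message=%q\n", c.Status, c.Reason, c.Message)
            }
        }
    }
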
Sep 29 16:52:16 crc kubenswrapper[4592]: I0929 16:52:16.183123 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:52:16 crc kubenswrapper[4592]: E0929 16:52:16.183571 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:52:16 crc kubenswrapper[4592]: I0929 16:52:16.183435 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:52:16 crc kubenswrapper[4592]: E0929 16:52:16.183810 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:52:16 crc kubenswrapper[4592]: I0929 16:52:16.183394 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:52:16 crc kubenswrapper[4592]: E0929 16:52:16.184078 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:52:16 crc kubenswrapper[4592]: I0929 16:52:16.231576 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:16 crc kubenswrapper[4592]: I0929 16:52:16.231615 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:16 crc kubenswrapper[4592]: I0929 16:52:16.231625 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:16 crc kubenswrapper[4592]: I0929 16:52:16.231639 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:16 crc kubenswrapper[4592]: I0929 16:52:16.231652 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:16Z","lastTransitionTime":"2025-09-29T16:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
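
The sandbox failures above all trace back to one message: the runtime found no CNI configuration file in /etc/kubernetes/cni/net.d/. A hedged, stdlib-only sketch of that directory check follows; the .conf/.conflist/.json extension set mirrors the usual CNI convention and is an assumption, not something quoted from this log:

    // A hypothetical cnicheck.go, mirroring the "no CNI configuration file"
    // test: list the conf dir and look for config files.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        dir := "/etc/kubernetes/cni/net.d" // path taken from the log message
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Printf("NetworkReady=false: cannot read %s: %v\n", dir, err)
            return
        }
        var confs []string
        for _, e := range entries {
            // Assumed extension set; the common CNI loader convention.
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                confs = append(confs, e.Name())
            }
        }
        if len(confs) == 0 {
            fmt.Println("NetworkReady=false: no CNI configuration file found")
            return
        }
        fmt.Printf("NetworkReady=true: found %v\n", confs)
    }
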
[the same five-message status block (four node events plus "Node became not ready") repeats at 16:52:16.333, 16:52:16.436, 16:52:16.539, 16:52:16.642, 16:52:16.746, 16:52:16.849, 16:52:16.952, 16:52:17.054, and 16:52:17.156]
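
For reference, here is the condition={...} payload these entries keep repeating, rebuilt as the typed value the kubelet patches onto the Node object. This is an illustrative sketch of the data shape, not kubelet source, with the timestamp pinned to one heartbeat seen above:

    // A hypothetical condition.go: the shape of the repeated condition payload.
    package main

    import (
        "encoding/json"
        "fmt"
        "time"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    )

    func main() {
        now := metav1.NewTime(time.Date(2025, time.September, 29, 16, 52, 16, 0, time.UTC))
        cond := corev1.NodeCondition{
            Type:               corev1.NodeReady,
            Status:             corev1.ConditionFalse,
            LastHeartbeatTime:  now,
            LastTransitionTime: now,
            Reason:             "KubeletNotReady",
            Message: "container runtime network not ready: NetworkReady=false " +
                "reason:NetworkPluginNotReady message:Network plugin returns error: " +
                "no CNI configuration file in /etc/kubernetes/cni/net.d/. " +
                "Has your network provider started?",
        }
        b, err := json.Marshal(cond)
        if err != nil {
            panic(err)
        }
        fmt.Println(string(b)) // matches the condition={...} text in the entries above
    }
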
Sep 29 16:52:17 crc kubenswrapper[4592]: I0929 16:52:17.182646 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:17 crc kubenswrapper[4592]: E0929 16:52:17.182842 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Sep 29 16:52:17 crc kubenswrapper[4592]: I0929 16:52:17.259448 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:17 crc kubenswrapper[4592]: I0929 16:52:17.259498 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:17 crc kubenswrapper[4592]: I0929 16:52:17.259509 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:17 crc kubenswrapper[4592]: I0929 16:52:17.259527 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:17 crc kubenswrapper[4592]: I0929 16:52:17.259539 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:17Z","lastTransitionTime":"2025-09-29T16:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the same five-message status block repeats at 16:52:17.362]
[the same five-message status block repeats at 16:52:17.464, 16:52:17.566, 16:52:17.668, 16:52:17.770, 16:52:17.873, and 16:52:17.974]
[the same five-message status block repeats at 16:52:18.077 and 16:52:18.178]
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.182611 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.182729 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:52:18 crc kubenswrapper[4592]: E0929 16:52:18.182725 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:52:18 crc kubenswrapper[4592]: E0929 16:52:18.182841 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.182881 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:52:18 crc kubenswrapper[4592]: E0929 16:52:18.183321 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.183512 4592 scope.go:117] "RemoveContainer" containerID="d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7"
Sep 29 16:52:18 crc kubenswrapper[4592]: E0929 16:52:18.183753 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\"" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.281355 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.281394 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.281405 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.281421 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.281432 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:18Z","lastTransitionTime":"2025-09-29T16:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the same five-message status block repeats at 16:52:18.384, 16:52:18.486, 16:52:18.589, 16:52:18.692, and 16:52:18.794]
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.896623 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.896648 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.896657 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.896668 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.896676 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:18Z","lastTransitionTime":"2025-09-29T16:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.945924 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.945954 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.945962 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.945973 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.945984 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:18Z","lastTransitionTime":"2025-09-29T16:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:18 crc kubenswrapper[4592]: E0929 16:52:18.956940 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:18Z is after 2025-08-24T17:21:41Z"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.959768 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.959785 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.959792 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.959803 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.959811 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:18Z","lastTransitionTime":"2025-09-29T16:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:18 crc kubenswrapper[4592]: E0929 16:52:18.973913 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:18Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.977296 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.977331 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.977346 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.977366 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.977382 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:18Z","lastTransitionTime":"2025-09-29T16:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:18 crc kubenswrapper[4592]: E0929 16:52:18.990094 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:18Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.993782 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.993839 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.993851 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.993866 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:18 crc kubenswrapper[4592]: I0929 16:52:18.993877 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:18Z","lastTransitionTime":"2025-09-29T16:52:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:19 crc kubenswrapper[4592]: E0929 16:52:19.007568 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:19Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.013529 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.013581 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.013597 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.013616 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.013635 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:19Z","lastTransitionTime":"2025-09-29T16:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:19 crc kubenswrapper[4592]: E0929 16:52:19.026378 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:19Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:19 crc kubenswrapper[4592]: E0929 16:52:19.026530 4592 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.028780 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
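Every status-update retry above fails for the same root cause: the serving certificate behind the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, over a month before these entries. A minimal Go sketch of how one might confirm the validity window from the node follows; the endpoint and port are taken from the error text, everything else is illustrative:

// certcheck.go - inspect the serving certificate of the webhook endpoint named
// in the error above. Chain verification is skipped on purpose so an already
// expired certificate can still be read instead of aborting the handshake.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // read the cert; do not trust it
	})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	now := time.Now().UTC()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%t\n",
			cert.Subject,
			cert.NotBefore.UTC().Format(time.RFC3339),
			cert.NotAfter.UTC().Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}

InsecureSkipVerify is deliberate: a normal handshake would abort with the same x509 "certificate has expired" error seen in the log before the certificate could be inspected.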
event="NodeHasSufficientMemory" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.028827 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.028839 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.028852 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.028878 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:19Z","lastTransitionTime":"2025-09-29T16:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.051514 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:19 crc kubenswrapper[4592]: E0929 16:52:19.051642 4592 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 16:52:19 crc kubenswrapper[4592]: E0929 16:52:19.051698 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs podName:484e63f2-7bae-4e57-ab79-95cba3bad285 nodeName:}" failed. No retries permitted until 2025-09-29 16:52:51.051684607 +0000 UTC m=+101.199462288 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs") pod "network-metrics-daemon-qvsjc" (UID: "484e63f2-7bae-4e57-ab79-95cba3bad285") : object "openshift-multus"/"metrics-daemon-secret" not registered Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.130954 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.131037 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.131061 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.131091 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.131182 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:19Z","lastTransitionTime":"2025-09-29T16:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.182472 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:19 crc kubenswrapper[4592]: E0929 16:52:19.182613 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
[the same event group and "Node became not ready" condition repeat at 16:52:19.233450 and 16:52:19.335807; elided as duplicates]
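The NodeNotReady condition and the "Error syncing pod" entry above share one message: no CNI configuration file in /etc/kubernetes/cni/net.d/. Until the network provider writes a config there, the runtime reports NetworkReady=false and no pod sandbox can be created. A small Go sketch of the directory check the message implies; the accepted extensions are an assumption modeled on common CNI config loaders, whose actual load rules are slightly broader:

// cnicheck.go - a sketch of the readiness condition reported above: the node
// stays NotReady while /etc/kubernetes/cni/net.d/ holds no network config files.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d"
	var found []string
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, _ := filepath.Glob(filepath.Join(dir, pattern))
		found = append(found, matches...)
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file in", dir, "- network plugin not ready")
		os.Exit(1)
	}
	fmt.Println("CNI config present:", found)
}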
Has your network provider started?"} Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.437890 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.437940 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.437953 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.437969 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.437979 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:19Z","lastTransitionTime":"2025-09-29T16:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.541534 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.541585 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.541601 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.541621 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.541634 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:19Z","lastTransitionTime":"2025-09-29T16:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.643455 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.643508 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.643519 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.643534 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.643545 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:19Z","lastTransitionTime":"2025-09-29T16:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.747002 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.747091 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.747115 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.747269 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.747309 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:19Z","lastTransitionTime":"2025-09-29T16:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.851025 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.851059 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.851067 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.851081 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.851091 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:19Z","lastTransitionTime":"2025-09-29T16:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.954107 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.954219 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.954241 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.954271 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:19 crc kubenswrapper[4592]: I0929 16:52:19.954293 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:19Z","lastTransitionTime":"2025-09-29T16:52:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.057959 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.058030 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.058051 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.058075 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.058098 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:20Z","lastTransitionTime":"2025-09-29T16:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.161460 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.161524 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.161546 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.161575 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.161597 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:20Z","lastTransitionTime":"2025-09-29T16:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.182513 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.182536 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.182579 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:20 crc kubenswrapper[4592]: E0929 16:52:20.182633 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:20 crc kubenswrapper[4592]: E0929 16:52:20.182720 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:20 crc kubenswrapper[4592]: E0929 16:52:20.182831 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.264560 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.264608 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.264622 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.264652 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.264669 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:20Z","lastTransitionTime":"2025-09-29T16:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.367030 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.367058 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.367067 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.367079 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.367088 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:20Z","lastTransitionTime":"2025-09-29T16:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.469302 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.469338 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.469348 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.469363 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.469375 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:20Z","lastTransitionTime":"2025-09-29T16:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.572084 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.572123 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.572131 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.572164 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.572174 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:20Z","lastTransitionTime":"2025-09-29T16:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.674603 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.674646 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.674660 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.674678 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.674690 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:20Z","lastTransitionTime":"2025-09-29T16:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.776638 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.776865 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.776952 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.777036 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.777159 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:20Z","lastTransitionTime":"2025-09-29T16:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.878811 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.878859 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.878872 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.878890 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.878905 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:20Z","lastTransitionTime":"2025-09-29T16:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.982790 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.982865 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.982898 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.982929 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:20 crc kubenswrapper[4592]: I0929 16:52:20.982949 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:20Z","lastTransitionTime":"2025-09-29T16:52:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.084882 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.085123 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.085207 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.085277 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.085340 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:21Z","lastTransitionTime":"2025-09-29T16:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.183023 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:21 crc kubenswrapper[4592]: E0929 16:52:21.184125 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.193240 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.193281 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.193291 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.193307 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.193319 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:21Z","lastTransitionTime":"2025-09-29T16:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.202965 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.214097 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.225656 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 
16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.237160 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.249191 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.260854 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc 
kubenswrapper[4592]: I0929 16:52:21.279278 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.290398 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.295389 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.295476 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.295487 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.295524 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.295536 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:21Z","lastTransitionTime":"2025-09-29T16:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.305015 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.318892 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.335814 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.350409 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.363957 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.375002 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.387828 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.397846 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.397880 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.397888 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.397902 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.397912 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:21Z","lastTransitionTime":"2025-09-29T16:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.404494 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:02Z\\\",\\\"message\\\":\\\"411 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438329 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438446 6190 kube.go:317] Updating pod openshift-multus/network-metrics-daemon-qvsjc\\\\nI0929 16:52:02.438482 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438501 6190 model_client.go:382] Update operations generated as: [{Op:update 
Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.413717 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:21Z is after 2025-08-24T17:21:41Z"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.500859 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.500933 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.500951 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.500977 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.500994 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:21Z","lastTransitionTime":"2025-09-29T16:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.603249 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.603289 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.603298 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.603310 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.603320 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:21Z","lastTransitionTime":"2025-09-29T16:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.705631 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.705668 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.705678 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.705851 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.705870 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:21Z","lastTransitionTime":"2025-09-29T16:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.808274 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.808319 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.808329 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.808344 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.808355 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:21Z","lastTransitionTime":"2025-09-29T16:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.910587 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.910960 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.911204 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.911408 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:21 crc kubenswrapper[4592]: I0929 16:52:21.911664 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:21Z","lastTransitionTime":"2025-09-29T16:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.014047 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.014094 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.014105 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.014120 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.014129 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:22Z","lastTransitionTime":"2025-09-29T16:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.117054 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.117097 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.117108 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.117125 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.117137 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:22Z","lastTransitionTime":"2025-09-29T16:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.182845 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:52:22 crc kubenswrapper[4592]: E0929 16:52:22.182956 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.182852 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.183178 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:52:22 crc kubenswrapper[4592]: E0929 16:52:22.183179 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:52:22 crc kubenswrapper[4592]: E0929 16:52:22.183361 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.219880 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.219933 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.219942 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.219957 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.219967 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:22Z","lastTransitionTime":"2025-09-29T16:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.322643 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.322688 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.322699 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.322715 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.322728 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:22Z","lastTransitionTime":"2025-09-29T16:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.425176 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.425248 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.425259 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.425274 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.425283 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:22Z","lastTransitionTime":"2025-09-29T16:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.530669 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.530927 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.530937 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.530969 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.530979 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:22Z","lastTransitionTime":"2025-09-29T16:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.607808 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbbtb_2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89/kube-multus/0.log"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.607860 4592 generic.go:334] "Generic (PLEG): container finished" podID="2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89" containerID="8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe" exitCode=1
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.607891 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbbtb" event={"ID":"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89","Type":"ContainerDied","Data":"8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe"}
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.608255 4592 scope.go:117] "RemoveContainer" containerID="8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.633613 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.633640 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.633651 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.633665 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.633675 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:22Z","lastTransitionTime":"2025-09-29T16:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.643326 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:02Z\\\",\\\"message\\\":\\\"411 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438329 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438446 6190 kube.go:317] Updating pod openshift-multus/network-metrics-daemon-qvsjc\\\\nI0929 16:52:02.438482 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438501 6190 model_client.go:382] Update operations generated as: [{Op:update 
Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.657162 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.673425 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.687530 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.703049 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.715387 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@s
ha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.730464 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.735252 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.735526 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.735534 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.735548 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.735557 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:22Z","lastTransitionTime":"2025-09-29T16:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.742775 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.754938 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.765893 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 
16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.776879 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.785858 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.800698 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.816220 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.825867 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.837790 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.837829 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.837840 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.837858 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.837869 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:22Z","lastTransitionTime":"2025-09-29T16:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.854213 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:22Z\\\",\\\"message\\\":\\\"2025-09-29T16:51:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542\\\\n2025-09-29T16:51:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542 to /host/opt/cni/bin/\\\\n2025-09-29T16:51:37Z [verbose] multus-daemon started\\\\n2025-09-29T16:51:37Z [verbose] Readiness Indicator file check\\\\n2025-09-29T16:52:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.881374 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:22Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.940336 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.940376 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:22 crc 
kubenswrapper[4592]: I0929 16:52:22.940387 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.940400 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:22 crc kubenswrapper[4592]: I0929 16:52:22.940410 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:22Z","lastTransitionTime":"2025-09-29T16:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.042686 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.042727 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.042740 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.042754 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.042763 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:23Z","lastTransitionTime":"2025-09-29T16:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.145360 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.145440 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.145454 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.145469 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.145481 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:23Z","lastTransitionTime":"2025-09-29T16:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.182744 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:23 crc kubenswrapper[4592]: E0929 16:52:23.182901 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.247592 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.247624 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.247632 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.247644 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.247652 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:23Z","lastTransitionTime":"2025-09-29T16:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.350044 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.350077 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.350085 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.350100 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.350109 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:23Z","lastTransitionTime":"2025-09-29T16:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.453558 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.453614 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.453626 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.453652 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.453663 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:23Z","lastTransitionTime":"2025-09-29T16:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.556568 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.556617 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.556629 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.556648 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.556660 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:23Z","lastTransitionTime":"2025-09-29T16:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.611248 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbbtb_2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89/kube-multus/0.log" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.611296 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbbtb" event={"ID":"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89","Type":"ContainerStarted","Data":"635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be"} Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.623349 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.633171 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.643812 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.656832 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.658471 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.658511 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:23 crc 
kubenswrapper[4592]: I0929 16:52:23.658523 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.658539 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.658550 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:23Z","lastTransitionTime":"2025-09-29T16:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.669876 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.680614 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.692113 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:22Z\\\",\\\"message\\\":\\\"2025-09-29T16:51:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542\\\\n2025-09-29T16:51:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542 to /host/opt/cni/bin/\\\\n2025-09-29T16:51:37Z [verbose] multus-daemon started\\\\n2025-09-29T16:51:37Z [verbose] Readiness 
Indicator file check\\\\n2025-09-29T16:52:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:52:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.704929 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.726207 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:02Z\\\",\\\"message\\\":\\\"411 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438329 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438446 6190 kube.go:317] Updating pod openshift-multus/network-metrics-daemon-qvsjc\\\\nI0929 16:52:02.438482 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438501 6190 model_client.go:382] Update operations generated as: [{Op:update Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.738333 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.749599 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.761057 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.761092 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.761102 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.761117 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.761127 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:23Z","lastTransitionTime":"2025-09-29T16:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.763103 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.773786 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z" Sep 29 
16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.782799 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z"
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.792983 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z"
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.802568 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z"
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.811271 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:23Z is after 2025-08-24T17:21:41Z"
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.862913 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.863116 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.863220 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.863289 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.863345 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:23Z","lastTransitionTime":"2025-09-29T16:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.966259 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.966291 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.966301 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.966316 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:23 crc kubenswrapper[4592]: I0929 16:52:23.966328 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:23Z","lastTransitionTime":"2025-09-29T16:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.069102 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.069164 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.069177 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.069195 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.069208 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:24Z","lastTransitionTime":"2025-09-29T16:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.171630 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.171671 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.171680 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.171693 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.171702 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:24Z","lastTransitionTime":"2025-09-29T16:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.182928 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.183091 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.183341 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:52:24 crc kubenswrapper[4592]: E0929 16:52:24.183437 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:52:24 crc kubenswrapper[4592]: E0929 16:52:24.183374 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:52:24 crc kubenswrapper[4592]: E0929 16:52:24.183677 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.274975 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.275025 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.275037 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.275057 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.275069 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:24Z","lastTransitionTime":"2025-09-29T16:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.377244 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.377281 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.377312 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.377325 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.377333 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:24Z","lastTransitionTime":"2025-09-29T16:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.479704 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.479763 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.479779 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.479794 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.479805 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:24Z","lastTransitionTime":"2025-09-29T16:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.581868 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.581913 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.581924 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.581939 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.581951 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:24Z","lastTransitionTime":"2025-09-29T16:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.683585 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.683614 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.683621 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.683633 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.683642 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:24Z","lastTransitionTime":"2025-09-29T16:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.787072 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.787128 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.787139 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.787182 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.787194 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:24Z","lastTransitionTime":"2025-09-29T16:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.889865 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.889913 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.889928 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.889948 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.889964 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:24Z","lastTransitionTime":"2025-09-29T16:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.991932 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.992301 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.992405 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.992491 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:24 crc kubenswrapper[4592]: I0929 16:52:24.992557 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:24Z","lastTransitionTime":"2025-09-29T16:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.094458 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.094500 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.094508 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.094524 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.094534 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:25Z","lastTransitionTime":"2025-09-29T16:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.182978 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:25 crc kubenswrapper[4592]: E0929 16:52:25.183555 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.196067 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.196123 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.196134 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.196175 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.196188 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:25Z","lastTransitionTime":"2025-09-29T16:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
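Interleaved with the webhook failures, the kubelet keeps flipping the node to NotReady because its runtime network check finds no CNI configuration in /etc/kubernetes/cni/net.d/, the directory named in every KubeletNotReady message here; on OpenShift that file is normally written by Multus/OVN-Kubernetes once the network pods come up. Below is a minimal Go sketch (not part of the log) of that directory probe, assuming libcni-style matching of *.conf, *.conflist and *.json files; the path comes straight from the log message.

    // Sketch: the readiness check implied by "no CNI configuration file in
    // /etc/kubernetes/cni/net.d/".
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        const confDir = "/etc/kubernetes/cni/net.d"
        entries, err := os.ReadDir(confDir)
        if err != nil {
            fmt.Println("cannot read CNI conf dir:", err)
            return
        }
        var configs []string
        for _, e := range entries {
            if e.IsDir() {
                continue
            }
            // libcni-style extension filter (assumption, mirrors ConfFiles).
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json":
                configs = append(configs, e.Name())
            }
        }
        if len(configs) == 0 {
            // The state this log keeps reporting: NetworkReady=false.
            fmt.Println("no CNI configuration file found; network plugin not ready")
            return
        }
        fmt.Println("CNI configs present:", configs)
    }

Until such a file appears, every pod that needs a fresh sandbox (the "No sandbox for pod can be found" lines) is skipped with "network is not ready", which is exactly the loop recorded below.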
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.298252 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.298284 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.298292 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.298307 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.298316 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:25Z","lastTransitionTime":"2025-09-29T16:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.400383 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.400441 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.400453 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.400470 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.400482 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:25Z","lastTransitionTime":"2025-09-29T16:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.502410 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.502443 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.502451 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.502465 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.502474 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:25Z","lastTransitionTime":"2025-09-29T16:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.605402 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.605459 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.605473 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.605492 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.605504 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:25Z","lastTransitionTime":"2025-09-29T16:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.708001 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.708045 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.708057 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.708074 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.708111 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:25Z","lastTransitionTime":"2025-09-29T16:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.810318 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.810349 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.810361 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.810378 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.810391 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:25Z","lastTransitionTime":"2025-09-29T16:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.913997 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.914032 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.914041 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.914055 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:25 crc kubenswrapper[4592]: I0929 16:52:25.914065 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:25Z","lastTransitionTime":"2025-09-29T16:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.016884 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.016917 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.016925 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.016936 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.016944 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:26Z","lastTransitionTime":"2025-09-29T16:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.119498 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.119544 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.119558 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.119575 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.119590 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:26Z","lastTransitionTime":"2025-09-29T16:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.182065 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:52:26 crc kubenswrapper[4592]: E0929 16:52:26.182240 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.182317 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:52:26 crc kubenswrapper[4592]: E0929 16:52:26.182376 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.182418 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:52:26 crc kubenswrapper[4592]: E0929 16:52:26.182467 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.226063 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.226109 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.226123 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.226163 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.226183 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:26Z","lastTransitionTime":"2025-09-29T16:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.328605 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.328647 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.328662 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.328677 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.328688 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:26Z","lastTransitionTime":"2025-09-29T16:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.430874 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.430907 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.430918 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.430933 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.430944 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:26Z","lastTransitionTime":"2025-09-29T16:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.533531 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.533598 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.533608 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.533622 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.533630 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:26Z","lastTransitionTime":"2025-09-29T16:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.635743 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.635830 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.635845 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.635864 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.635878 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:26Z","lastTransitionTime":"2025-09-29T16:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.738032 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.738067 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.738079 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.738095 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.738109 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:26Z","lastTransitionTime":"2025-09-29T16:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.840710 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.840756 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.840769 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.840786 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.840797 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:26Z","lastTransitionTime":"2025-09-29T16:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.942740 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.942771 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.942779 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.942793 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:26 crc kubenswrapper[4592]: I0929 16:52:26.942802 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:26Z","lastTransitionTime":"2025-09-29T16:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.046376 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.046446 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.046459 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.046474 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.046509 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:27Z","lastTransitionTime":"2025-09-29T16:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.149295 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.149376 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.149387 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.149403 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.149411 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:27Z","lastTransitionTime":"2025-09-29T16:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.183299 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:27 crc kubenswrapper[4592]: E0929 16:52:27.183625 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.251859 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.251902 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.251913 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.251928 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.251939 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:27Z","lastTransitionTime":"2025-09-29T16:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.354452 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.354499 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.354510 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.354526 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.354540 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:27Z","lastTransitionTime":"2025-09-29T16:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.456758 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.456797 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.456808 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.456822 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.456834 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:27Z","lastTransitionTime":"2025-09-29T16:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.558474 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.558507 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.558515 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.558528 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.558537 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:27Z","lastTransitionTime":"2025-09-29T16:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.661094 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.661119 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.661127 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.661138 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.661169 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:27Z","lastTransitionTime":"2025-09-29T16:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.765484 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.765526 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.765536 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.765552 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.765562 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:27Z","lastTransitionTime":"2025-09-29T16:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.867754 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.867822 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.867843 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.867870 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.867932 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:27Z","lastTransitionTime":"2025-09-29T16:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.970799 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.970839 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.970848 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.970861 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:27 crc kubenswrapper[4592]: I0929 16:52:27.970870 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:27Z","lastTransitionTime":"2025-09-29T16:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.073858 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.073918 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.073929 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.073949 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.073960 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:28Z","lastTransitionTime":"2025-09-29T16:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.175928 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.175969 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.175977 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.175991 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.175999 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:28Z","lastTransitionTime":"2025-09-29T16:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.181975 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:52:28 crc kubenswrapper[4592]: E0929 16:52:28.182089 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.181996 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.181993 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:28 crc kubenswrapper[4592]: E0929 16:52:28.182190 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:28 crc kubenswrapper[4592]: E0929 16:52:28.182310 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.278922 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.278986 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.279001 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.279022 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.279036 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:28Z","lastTransitionTime":"2025-09-29T16:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.382098 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.382198 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.382221 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.382251 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.382274 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:28Z","lastTransitionTime":"2025-09-29T16:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.485028 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.485086 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.485104 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.485120 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.485134 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:28Z","lastTransitionTime":"2025-09-29T16:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.589688 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.589766 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.589792 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.589838 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.589862 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:28Z","lastTransitionTime":"2025-09-29T16:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.693470 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.693527 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.693544 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.693572 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.693595 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:28Z","lastTransitionTime":"2025-09-29T16:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.796441 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.796521 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.796548 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.796577 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.796596 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:28Z","lastTransitionTime":"2025-09-29T16:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.899896 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.899953 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.899982 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.900004 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:28 crc kubenswrapper[4592]: I0929 16:52:28.900021 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:28Z","lastTransitionTime":"2025-09-29T16:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.003539 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.003670 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.003691 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.003715 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.003733 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.106725 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.106831 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.106847 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.106942 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.106961 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.182883 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:29 crc kubenswrapper[4592]: E0929 16:52:29.183076 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.211555 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.211594 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.211603 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.211627 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.211637 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.274099 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.274139 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.274167 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.274183 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.274197 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: E0929 16:52:29.286953 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:29Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.290569 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.290610 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.290621 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.290633 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.290642 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: E0929 16:52:29.301098 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:29Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.304567 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.304601 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.304609 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.304623 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.304636 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: E0929 16:52:29.318758 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:29Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.322453 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.322473 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.322500 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.322513 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.322521 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: E0929 16:52:29.333266 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:29Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.336922 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.336954 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.336963 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.336977 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.336986 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: E0929 16:52:29.347198 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:29Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:29 crc kubenswrapper[4592]: E0929 16:52:29.347333 4592 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.348997 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.349040 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.349056 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.349077 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.349086 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.451128 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.451179 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.451187 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.451200 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.451209 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.553598 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.553638 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.553650 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.553664 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.553674 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.655656 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.655762 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.655861 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.655892 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.655913 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.758285 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.758334 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.758344 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.758357 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.758367 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.860354 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.860382 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.860390 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.860402 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.860411 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.963513 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.963589 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.963611 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.963639 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:29 crc kubenswrapper[4592]: I0929 16:52:29.963659 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:29Z","lastTransitionTime":"2025-09-29T16:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.066251 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.066316 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.066331 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.066353 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.066368 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:30Z","lastTransitionTime":"2025-09-29T16:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.168848 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.168900 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.168910 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.168925 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.168936 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:30Z","lastTransitionTime":"2025-09-29T16:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.182637 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:30 crc kubenswrapper[4592]: E0929 16:52:30.182814 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.182669 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:30 crc kubenswrapper[4592]: E0929 16:52:30.182925 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.182640 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:30 crc kubenswrapper[4592]: E0929 16:52:30.183012 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.272539 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.272595 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.272612 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.272634 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.272650 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:30Z","lastTransitionTime":"2025-09-29T16:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.375064 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.375120 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.375130 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.375173 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.375185 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:30Z","lastTransitionTime":"2025-09-29T16:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.477433 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.477485 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.477495 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.477512 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.477870 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:30Z","lastTransitionTime":"2025-09-29T16:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.579695 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.579725 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.579733 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.579746 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.579754 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:30Z","lastTransitionTime":"2025-09-29T16:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.682726 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.682768 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.682779 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.682794 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.682805 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:30Z","lastTransitionTime":"2025-09-29T16:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.785695 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.785739 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.785751 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.785768 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.785780 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:30Z","lastTransitionTime":"2025-09-29T16:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.888242 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.888301 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.888318 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.888343 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.888371 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:30Z","lastTransitionTime":"2025-09-29T16:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.992545 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.992678 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.992753 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.992787 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:30 crc kubenswrapper[4592]: I0929 16:52:30.992858 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:30Z","lastTransitionTime":"2025-09-29T16:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.095220 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.095260 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.095270 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.095288 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.095298 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:31Z","lastTransitionTime":"2025-09-29T16:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.183036 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:31 crc kubenswrapper[4592]: E0929 16:52:31.183190 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.196538 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.197991 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.198020 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.198031 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.198045 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.198056 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:31Z","lastTransitionTime":"2025-09-29T16:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.207624 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.217752 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.234298 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.246580 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.257878 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:22Z\\\",\\\"message\\\":\\\"2025-09-29T16:51:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542\\\\n2025-09-29T16:51:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542 to /host/opt/cni/bin/\\\\n2025-09-29T16:51:37Z [verbose] multus-daemon started\\\\n2025-09-29T16:51:37Z [verbose] Readiness 
Indicator file check\\\\n2025-09-29T16:52:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:52:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.269194 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.284891 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:02Z\\\",\\\"message\\\":\\\"411 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438329 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438446 6190 kube.go:317] Updating pod openshift-multus/network-metrics-daemon-qvsjc\\\\nI0929 16:52:02.438482 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438501 6190 model_client.go:382] Update operations generated as: [{Op:update Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.294819 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.300965 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.300997 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.301009 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.301024 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.301035 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:31Z","lastTransitionTime":"2025-09-29T16:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.306762 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.316527 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.326860 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.336009 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount
\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.348684 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.359747 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.371218 4592 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.388932 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:31Z is after 2025-08-24T17:21:41Z" Sep 29 
16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.403563 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.403598 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.403606 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.403622 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.403631 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:31Z","lastTransitionTime":"2025-09-29T16:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.506737 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.506791 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.506808 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.506832 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.506849 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:31Z","lastTransitionTime":"2025-09-29T16:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.609309 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.609356 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.609370 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.609389 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.609401 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:31Z","lastTransitionTime":"2025-09-29T16:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.711593 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.711642 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.711655 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.711674 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.711687 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:31Z","lastTransitionTime":"2025-09-29T16:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.814026 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.814058 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.814069 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.814084 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.814095 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:31Z","lastTransitionTime":"2025-09-29T16:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.916618 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.916674 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.916687 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.916704 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:31 crc kubenswrapper[4592]: I0929 16:52:31.916717 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:31Z","lastTransitionTime":"2025-09-29T16:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.019484 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.019532 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.019546 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.019582 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.019597 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:32Z","lastTransitionTime":"2025-09-29T16:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.123398 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.124113 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.124140 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.124186 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.124197 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:32Z","lastTransitionTime":"2025-09-29T16:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.182415 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.182416 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.182471 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:32 crc kubenswrapper[4592]: E0929 16:52:32.182641 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:32 crc kubenswrapper[4592]: E0929 16:52:32.182746 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:32 crc kubenswrapper[4592]: E0929 16:52:32.182815 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.227972 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.228030 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.228222 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.228242 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.228253 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:32Z","lastTransitionTime":"2025-09-29T16:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.330001 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.330060 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.330069 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.330081 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.330090 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:32Z","lastTransitionTime":"2025-09-29T16:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.432045 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.432087 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.432100 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.432133 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.432174 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:32Z","lastTransitionTime":"2025-09-29T16:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.534692 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.534729 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.534740 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.534756 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.534767 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:32Z","lastTransitionTime":"2025-09-29T16:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.636556 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.636619 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.636635 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.636657 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.636675 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:32Z","lastTransitionTime":"2025-09-29T16:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.740244 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.740301 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.740316 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.740334 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.740347 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:32Z","lastTransitionTime":"2025-09-29T16:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.843079 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.843179 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.843192 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.843210 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.843220 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:32Z","lastTransitionTime":"2025-09-29T16:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.945833 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.945901 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.945915 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.945930 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:32 crc kubenswrapper[4592]: I0929 16:52:32.945939 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:32Z","lastTransitionTime":"2025-09-29T16:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.049461 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.049536 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.049558 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.049587 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.049608 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:33Z","lastTransitionTime":"2025-09-29T16:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.151968 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.152010 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.152020 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.152035 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.152049 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:33Z","lastTransitionTime":"2025-09-29T16:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.182375 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:33 crc kubenswrapper[4592]: E0929 16:52:33.182575 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.183611 4592 scope.go:117] "RemoveContainer" containerID="d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.199204 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.254350 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.254398 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.254408 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.254422 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.254432 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:33Z","lastTransitionTime":"2025-09-29T16:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.356751 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.356785 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.356803 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.356822 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.356833 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:33Z","lastTransitionTime":"2025-09-29T16:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.459858 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.459898 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.459911 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.459925 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.459936 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:33Z","lastTransitionTime":"2025-09-29T16:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.562114 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.562179 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.562195 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.562218 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.562231 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:33Z","lastTransitionTime":"2025-09-29T16:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.668309 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.668362 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.668379 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.668405 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.668422 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:33Z","lastTransitionTime":"2025-09-29T16:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.771605 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.771946 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.772041 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.772178 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.772288 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:33Z","lastTransitionTime":"2025-09-29T16:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.875008 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.875033 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.875041 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.875053 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.875061 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:33Z","lastTransitionTime":"2025-09-29T16:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.977471 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.977501 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.977511 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.977530 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:33 crc kubenswrapper[4592]: I0929 16:52:33.977542 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:33Z","lastTransitionTime":"2025-09-29T16:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.079658 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.080210 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.080242 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.080261 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.080281 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:34Z","lastTransitionTime":"2025-09-29T16:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.182096 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.182138 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:34 crc kubenswrapper[4592]: E0929 16:52:34.182226 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.182248 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:34 crc kubenswrapper[4592]: E0929 16:52:34.182386 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:34 crc kubenswrapper[4592]: E0929 16:52:34.182451 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.183063 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.183083 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.183091 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.183103 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.183111 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:34Z","lastTransitionTime":"2025-09-29T16:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.286012 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.286070 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.286086 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.286109 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.286127 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:34Z","lastTransitionTime":"2025-09-29T16:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.389793 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.389846 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.389868 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.389896 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.389918 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:34Z","lastTransitionTime":"2025-09-29T16:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.492047 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.492096 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.492109 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.492127 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.492139 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:34Z","lastTransitionTime":"2025-09-29T16:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.594305 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.594334 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.594344 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.594358 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.594369 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:34Z","lastTransitionTime":"2025-09-29T16:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.651312 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/2.log" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.663363 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerStarted","Data":"a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64"} Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.664220 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.677774 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.686981 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.696339 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.699997 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.700036 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.700046 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.700063 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.700074 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:34Z","lastTransitionTime":"2025-09-29T16:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.717222 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.731090 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.739946 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.749970 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.758674 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.768595 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.780201 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:22Z\\\",\\\"message\\\":\\\"2025-09-29T16:51:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542\\\\n2025-09-29T16:51:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542 to /host/opt/cni/bin/\\\\n2025-09-29T16:51:37Z [verbose] multus-daemon started\\\\n2025-09-29T16:51:37Z [verbose] Readiness 
Indicator file check\\\\n2025-09-29T16:52:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:52:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.794752 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.801867 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.801890 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.801898 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.801910 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.801918 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:34Z","lastTransitionTime":"2025-09-29T16:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.819227 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b151d75f-29cf-4412-9252-6cf067c29b75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c626916e89c60a1089ed6f1ab41167d76690cf60c244950a33161005c2c1045c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://910220e33f4ea4e52d24e419e847fd0a008ef93b42759b876b599a9ec523e012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d022815100067a0f62a9ad230cdd4e9aef59946fe0cebc661d0a13884eaadde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb1cb45cf9d23f15147e3aad9fe4889a2bb40532ece918719f3031b6097e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://034a2b9e94fcce9479a231a578e91f3a8e4cafec8e7b8ece2fa1aabc7063bef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d788e7ecffd4fec7a649dea7ffc449892e9211e74ca2205abfccf7e317125a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d788e7ecffd4fec7a649dea7ffc449892e9211e74ca2205abfccf7e317125a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d56413fa0f124c9c8c73c54bb11be950d4cf0ea2d7aa147a1d44e3ca8fd9d826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56413fa0f124c9c8c73c54bb11be950d4cf0ea2d7aa147a1d44e3ca8fd9d826\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f5e57a99b51370c3625acbaac2f95c9d5aca8f3dd5a28c757416203f51acd76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f5e57a99b51370c3625acbaac2f95c9d5aca8f3dd5a28c757416203f51acd76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.833186 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.844588 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.857567 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.869528 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.887287 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c8a32c73f9da92aa34adb412a119c81c2834a0
3c6d626672db010a3d2bff64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:02Z\\\",\\\"message\\\":\\\"411 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438329 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438446 6190 kube.go:317] Updating pod openshift-multus/network-metrics-daemon-qvsjc\\\\nI0929 16:52:02.438482 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438501 6190 model_client.go:382] Update operations generated as: [{Op:update 
Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:52:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.898027 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:34Z is after 2025-08-24T17:21:41Z"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.904238 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.904403 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.904478 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.904547 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:34 crc kubenswrapper[4592]: I0929 16:52:34.904611 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:34Z","lastTransitionTime":"2025-09-29T16:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.006316 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.006356 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.006369 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.006383 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.006394 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:35Z","lastTransitionTime":"2025-09-29T16:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.109452 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.109528 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.109543 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.109569 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.109591 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:35Z","lastTransitionTime":"2025-09-29T16:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.183105 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.183304 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.212279 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.212320 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.212332 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.212348 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.212359 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:35Z","lastTransitionTime":"2025-09-29T16:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.314636 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.314678 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.314691 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.314707 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.314728 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:35Z","lastTransitionTime":"2025-09-29T16:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.418107 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.418220 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.418245 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.418269 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.418284 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:35Z","lastTransitionTime":"2025-09-29T16:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.520849 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.520890 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.520899 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.520913 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.520921 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:35Z","lastTransitionTime":"2025-09-29T16:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.623225 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.623311 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.623337 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.623366 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.623387 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:35Z","lastTransitionTime":"2025-09-29T16:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.669510 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/3.log"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.670505 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/2.log"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.674510 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64" exitCode=1
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.674699 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64"}
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.674868 4592 scope.go:117] "RemoveContainer" containerID="d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.676098 4592 scope.go:117] "RemoveContainer" containerID="a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64"
Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.676556 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\"" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac"
Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.698731 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.720014 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.726937 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.727010 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.727030 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.727056 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.727075 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:35Z","lastTransitionTime":"2025-09-29T16:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.742048 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:22Z\\\",\\\"message\\\":\\\"2025-09-29T16:51:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542\\\\n2025-09-29T16:51:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542 to /host/opt/cni/bin/\\\\n2025-09-29T16:51:37Z [verbose] multus-daemon started\\\\n2025-09-29T16:51:37Z [verbose] Readiness Indicator file check\\\\n2025-09-29T16:52:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:52:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.756408 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.774598 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b151d75f-29cf-4412-9252-6cf067c29b75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c626916e89c60a1089ed6f1ab41167d76690cf60c244950a33161005c2c1045c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://910220e33f4ea4e52d24e419e847fd0a008ef93b42759b876b599a9ec523e012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d022815100067a0f62a9ad230cdd4e9aef59946fe0cebc661d0a13884eaadde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb1cb45cf9d23f15147e3aad9fe4889a2bb405
32ece918719f3031b6097e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://034a2b9e94fcce9479a231a578e91f3a8e4cafec8e7b8ece2fa1aabc7063bef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d788e7ecffd4fec7a649dea7ffc449892e9211e74ca2205abfccf7e317125a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d788e7ecffd4fec7a649dea7ffc449892e9211e74ca2205abfccf7e317125a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d56413fa0f124c9c8c73c54bb11be950d4cf0ea2d7aa147a1d44e3ca8fd9d826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56413fa0f124c9c8c73c54bb11be950d4cf0ea2d7aa147a1d44e3ca8fd9d826\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f5e57a99b51370c3625acbaac2f95c9d5aca8f3dd5a28c757416203f51acd76\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f5e57a99b51370c3625acbaac2f95c9d5aca8f3dd5a28c757416203f51acd76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.788459 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.803815 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.815797 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.829854 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.829911 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.829932 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.829955 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.829974 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:35Z","lastTransitionTime":"2025-09-29T16:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.830958 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.847321 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d431c05f9cb13875e252c052e777e71f3fc9326e196b09d19685b98b2139dcf7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:02Z\\\",\\\"message\\\":\\\"411 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {eb8eef51-1a8d-43f9-ae2e-3b2cc00ded60}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438329 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438446 6190 kube.go:317] Updating pod openshift-multus/network-metrics-daemon-qvsjc\\\\nI0929 16:52:02.438482 6190 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0929 16:52:02.438501 6190 model_client.go:382] Update operations generated as: [{Op:update 
Table:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:35Z\\\",\\\"message\\\":\\\"zation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z]\\\\nI0929 16:52:35.072098 6596 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication/oauth-openshift_TCP_cluster\\\\\\\", UUID:\\\\\\\"c0c2f725-e461-454e-a88c-c8350d62e1ef\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication/oauth-openshift\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication/oauth-openshift_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", 
ExternalIDs:map[stri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1
74f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.867128 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.875400 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.875498 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.875525 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.875544 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875625 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.875593585 +0000 UTC m=+150.023371276 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875644 4592 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.875665 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875715 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.875696258 +0000 UTC m=+150.023473939 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875716 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875740 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875744 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875753 4592 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875759 4592 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875763 4592 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875633 4592 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875788 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.875780741 +0000 UTC m=+150.023558422 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875831 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.875818172 +0000 UTC m=+150.023595853 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Sep 29 16:52:35 crc kubenswrapper[4592]: E0929 16:52:35.875844 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.875837122 +0000 UTC m=+150.023614803 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.881605 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTi
me\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.891094 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.900043 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 
16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.909673 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.920213 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.932463 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.932488 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.932496 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.932509 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.932518 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:35Z","lastTransitionTime":"2025-09-29T16:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.933298 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:35 crc kubenswrapper[4592]: I0929 16:52:35.943913 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.034821 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.034878 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.034897 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.034921 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.034938 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:36Z","lastTransitionTime":"2025-09-29T16:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.137918 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.137972 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.137989 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.138010 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.138027 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:36Z","lastTransitionTime":"2025-09-29T16:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.182724 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:36 crc kubenswrapper[4592]: E0929 16:52:36.182936 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.182724 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.182724 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:36 crc kubenswrapper[4592]: E0929 16:52:36.183097 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:36 crc kubenswrapper[4592]: E0929 16:52:36.183316 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.240410 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.240466 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.240483 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.240505 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.240517 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:36Z","lastTransitionTime":"2025-09-29T16:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.343192 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.343226 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.343236 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.343251 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.343261 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:36Z","lastTransitionTime":"2025-09-29T16:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.450489 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.450774 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.450908 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.451046 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.451248 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:36Z","lastTransitionTime":"2025-09-29T16:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.554671 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.554729 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.554749 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.554775 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.554797 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:36Z","lastTransitionTime":"2025-09-29T16:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.656946 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.657019 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.657041 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.657364 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.657387 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:36Z","lastTransitionTime":"2025-09-29T16:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.679532 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/3.log" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.683917 4592 scope.go:117] "RemoveContainer" containerID="a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64" Sep 29 16:52:36 crc kubenswrapper[4592]: E0929 16:52:36.684487 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\"" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.701465 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.717456 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.730040 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.746870 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.761185 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.761266 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.761288 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.761312 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.761331 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:36Z","lastTransitionTime":"2025-09-29T16:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.764684 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:22Z\\\",\\\"message\\\":\\\"2025-09-29T16:51:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542\\\\n2025-09-29T16:51:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542 to /host/opt/cni/bin/\\\\n2025-09-29T16:51:37Z [verbose] multus-daemon started\\\\n2025-09-29T16:51:37Z [verbose] Readiness Indicator file check\\\\n2025-09-29T16:52:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:52:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.783276 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.817170 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b151d75f-29cf-4412-9252-6cf067c29b75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c626916e89c60a1089ed6f1ab41167d76690cf60c244950a33161005c2c1045c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://910220e33f4ea4e52d24e419e847fd0a008ef93b42759b876b599a9ec523e012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d022815100067a0f62a9ad230cdd4e9aef59946fe0cebc661d0a13884eaadde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb1cb45cf9d23f15147e3aad9fe4889a2bb405
32ece918719f3031b6097e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://034a2b9e94fcce9479a231a578e91f3a8e4cafec8e7b8ece2fa1aabc7063bef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d788e7ecffd4fec7a649dea7ffc449892e9211e74ca2205abfccf7e317125a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d788e7ecffd4fec7a649dea7ffc449892e9211e74ca2205abfccf7e317125a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d56413fa0f124c9c8c73c54bb11be950d4cf0ea2d7aa147a1d44e3ca8fd9d826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56413fa0f124c9c8c73c54bb11be950d4cf0ea2d7aa147a1d44e3ca8fd9d826\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f5e57a99b51370c3625acbaac2f95c9d5aca8f3dd5a28c757416203f51acd76\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f5e57a99b51370c3625acbaac2f95c9d5aca8f3dd5a28c757416203f51acd76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.831180 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236
d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.845485 4592 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.862588 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.863294 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.863395 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.863507 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.863603 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.863692 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:36Z","lastTransitionTime":"2025-09-29T16:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.875870 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.895881 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:35Z\\\",\\\"message\\\":\\\"zation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z]\\\\nI0929 16:52:35.072098 6596 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication/oauth-openshift_TCP_cluster\\\\\\\", UUID:\\\\\\\"c0c2f725-e461-454e-a88c-c8350d62e1ef\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication/oauth-openshift\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication/oauth-openshift_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[stri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.907597 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.918081 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.926441 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.935751 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126
.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.945470 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.956835 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:36Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.966281 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.966332 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.966344 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.966362 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:36 crc kubenswrapper[4592]: I0929 16:52:36.966374 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:36Z","lastTransitionTime":"2025-09-29T16:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.068271 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.068301 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.068309 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.068320 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.068327 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:37Z","lastTransitionTime":"2025-09-29T16:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.170970 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.171324 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.171460 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.171575 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.171681 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:37Z","lastTransitionTime":"2025-09-29T16:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.182522 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:37 crc kubenswrapper[4592]: E0929 16:52:37.182753 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.274010 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.274092 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.274110 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.274194 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.274216 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:37Z","lastTransitionTime":"2025-09-29T16:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.376130 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.376173 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.376182 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.376194 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.376203 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:37Z","lastTransitionTime":"2025-09-29T16:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.478833 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.478864 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.478872 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.478885 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.478895 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:37Z","lastTransitionTime":"2025-09-29T16:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.582476 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.582532 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.582550 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.582568 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.582582 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:37Z","lastTransitionTime":"2025-09-29T16:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.685267 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.685318 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.685333 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.685356 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.685373 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:37Z","lastTransitionTime":"2025-09-29T16:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.787914 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.787985 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.787998 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.788016 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.788028 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:37Z","lastTransitionTime":"2025-09-29T16:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.890895 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.890966 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.890976 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.891020 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.891035 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:37Z","lastTransitionTime":"2025-09-29T16:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.993036 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.993088 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.993103 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.993124 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:37 crc kubenswrapper[4592]: I0929 16:52:37.993140 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:37Z","lastTransitionTime":"2025-09-29T16:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.095467 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.095507 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.095519 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.095535 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.095545 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:38Z","lastTransitionTime":"2025-09-29T16:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.182594 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.182765 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.182762 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:38 crc kubenswrapper[4592]: E0929 16:52:38.182943 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:38 crc kubenswrapper[4592]: E0929 16:52:38.183199 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:38 crc kubenswrapper[4592]: E0929 16:52:38.183252 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.194832 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.198557 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.198603 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.198614 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.198631 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.198642 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:38Z","lastTransitionTime":"2025-09-29T16:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.300706 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.300747 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.300758 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.300774 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.300785 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:38Z","lastTransitionTime":"2025-09-29T16:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.404834 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.404879 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.404892 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.404907 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.404918 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:38Z","lastTransitionTime":"2025-09-29T16:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.506636 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.506680 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.506693 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.506709 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.506722 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:38Z","lastTransitionTime":"2025-09-29T16:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.610125 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.610251 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.610321 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.610351 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.610416 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:38Z","lastTransitionTime":"2025-09-29T16:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.713323 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.713390 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.713408 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.713432 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.713451 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:38Z","lastTransitionTime":"2025-09-29T16:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.815761 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.815798 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.815809 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.815826 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.815843 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:38Z","lastTransitionTime":"2025-09-29T16:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.918785 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.918839 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.918855 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.918877 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:38 crc kubenswrapper[4592]: I0929 16:52:38.918898 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:38Z","lastTransitionTime":"2025-09-29T16:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.020933 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.020971 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.020980 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.020992 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.021001 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.123080 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.123182 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.123197 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.123215 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.123227 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.182690 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:39 crc kubenswrapper[4592]: E0929 16:52:39.182871 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.225195 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.225237 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.225248 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.225267 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.225277 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.327645 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.327691 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.327702 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.327718 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.327727 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.430241 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.430285 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.430296 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.430311 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.430323 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.526171 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.526204 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.526215 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.526229 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.526238 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: E0929 16:52:39.538116 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.541132 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.541181 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.541190 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.541204 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.541216 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: E0929 16:52:39.551422 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.554906 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.554950 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.554963 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.554979 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.554990 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: E0929 16:52:39.571567 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.575875 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.575926 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.575938 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.575974 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.575991 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: E0929 16:52:39.588749 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.592252 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.592298 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.592311 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.592328 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.592340 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: E0929 16:52:39.606453 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:39Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:39 crc kubenswrapper[4592]: E0929 16:52:39.606621 4592 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.608319 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.608359 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.608371 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.608389 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.608402 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.710735 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.710780 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.710791 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.710806 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.710818 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.813356 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.813398 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.813409 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.813431 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.813443 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.915970 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.916014 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.916025 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.916044 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:39 crc kubenswrapper[4592]: I0929 16:52:39.916057 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:39Z","lastTransitionTime":"2025-09-29T16:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.018799 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.018842 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.018851 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.018870 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.018880 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:40Z","lastTransitionTime":"2025-09-29T16:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.120777 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.120817 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.120824 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.120838 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.120847 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:40Z","lastTransitionTime":"2025-09-29T16:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.182066 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:40 crc kubenswrapper[4592]: E0929 16:52:40.182256 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.182267 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:40 crc kubenswrapper[4592]: E0929 16:52:40.182348 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.182503 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:40 crc kubenswrapper[4592]: E0929 16:52:40.182577 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.223434 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.223477 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.223489 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.223503 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.223513 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:40Z","lastTransitionTime":"2025-09-29T16:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.325544 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.325584 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.325596 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.325611 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.325622 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:40Z","lastTransitionTime":"2025-09-29T16:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.428031 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.428068 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.428078 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.428091 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.428100 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:40Z","lastTransitionTime":"2025-09-29T16:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.530608 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.530650 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.530659 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.530673 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.530687 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:40Z","lastTransitionTime":"2025-09-29T16:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.633451 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.633489 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.633497 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.633509 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.633517 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:40Z","lastTransitionTime":"2025-09-29T16:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.735935 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.735979 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.735992 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.736010 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.736023 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:40Z","lastTransitionTime":"2025-09-29T16:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.838942 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.838975 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.838984 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.838996 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.839003 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:40Z","lastTransitionTime":"2025-09-29T16:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.941478 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.941519 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.941529 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.941543 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:40 crc kubenswrapper[4592]: I0929 16:52:40.941554 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:40Z","lastTransitionTime":"2025-09-29T16:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.044560 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.044608 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.044623 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.044642 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.044653 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:41Z","lastTransitionTime":"2025-09-29T16:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.146977 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.147026 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.147062 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.147079 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.147091 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:41Z","lastTransitionTime":"2025-09-29T16:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.182597 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:41 crc kubenswrapper[4592]: E0929 16:52:41.182788 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.194886 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.216512 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"
ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceacc
ount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:35Z\\\",\\\"message\\\":\\\"zation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z]\\\\nI0929 16:52:35.072098 6596 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication/oauth-openshift_TCP_cluster\\\\\\\", UUID:\\\\\\\"c0c2f725-e461-454e-a88c-c8350d62e1ef\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication/oauth-openshift\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication/oauth-openshift_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[stri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\
\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.228954 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"77dea16c-6cbf-4f00-86c9-498bb0dfc946\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bbde651cf91895d2be9af42857fee9880a7b7a9ccc270f86e07499848b23426\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d1091e1593164ad15c8a399e57e348e317afe114a957b81c9611fef33f54fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d1091e1593164ad15c8a399e57e348e317afe114a957b81c9611fef33f54fe4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.242304 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.249385 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.249411 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.249419 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.249456 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.249465 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:41Z","lastTransitionTime":"2025-09-29T16:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.253584 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.265400 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.275026 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.285216 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.296758 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 
2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.306069 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.315343 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.326568 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.335537 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.345802 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.351552 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.351584 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.351593 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.351606 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.351615 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:41Z","lastTransitionTime":"2025-09-29T16:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.358975 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.379455 4592 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b151d75f-29cf-4412-9252-6cf067c29b75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c626916e89c60a1089ed6f1ab41167d76690cf60c244950a33161005c2c1045c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://910220e33f4ea4e52d24e419e847fd0a008ef93b42759b876b599a9ec523e012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d022815100067a0f62a9ad230cdd4e9aef59946fe0cebc661d0a13884eaadde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://bbb1cb45cf9d23f15147e3aad9fe4889a2bb40532ece918719f3031b6097e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://034a2b9e94fcce9479a231a578e91f3a8e4cafec8e7b8ece2fa1aabc7063bef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d788e7ecffd4fec7a649dea7ffc449892e9211e74ca2205abfccf7e317125a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d788e7ecffd4fec7a649dea7ffc449892e9211e74ca2205abfccf7e317125a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d56413fa0f124c9c8c73c54bb11be950d4cf0ea2d7aa147a1d44e3ca8fd9d826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56413fa0f124c9c8c73c54bb11be950d4cf0ea2d7aa147a1d44e3ca8fd9d826\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f5e57a99b
51370c3625acbaac2f95c9d5aca8f3dd5a28c757416203f51acd76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f5e57a99b51370c3625acbaac2f95c9d5aca8f3dd5a28c757416203f51acd76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.394663 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"co
ntainerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc 
kubenswrapper[4592]: I0929 16:52:41.407732 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\
":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.419438 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:22Z\\\",\\\"message\\\":\\\"2025-09-29T16:51:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542\\\\n2025-09-29T16:51:36+00:00 [cnibincopy] Successfully moved 
files in /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542 to /host/opt/cni/bin/\\\\n2025-09-29T16:51:37Z [verbose] multus-daemon started\\\\n2025-09-29T16:51:37Z [verbose] Readiness Indicator file check\\\\n2025-09-29T16:52:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:52:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:41Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.453962 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.454187 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.454291 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.454381 
4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:41 crc kubenswrapper[4592]: I0929 16:52:41.454464 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:41Z","lastTransitionTime":"2025-09-29T16:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[The five-record node-status block above (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats at roughly 100 ms intervals from 16:52:41.558 through 16:52:45.372, where this excerpt ends; only the log and heartbeat timestamps change between repeats, so the repeats are elided and the remaining non-repeating records are kept below, one record per line.]
Sep 29 16:52:42 crc kubenswrapper[4592]: I0929 16:52:42.182095 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:52:42 crc kubenswrapper[4592]: I0929 16:52:42.182194 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:52:42 crc kubenswrapper[4592]: I0929 16:52:42.182227 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:52:42 crc kubenswrapper[4592]: E0929 16:52:42.182385 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:52:42 crc kubenswrapper[4592]: E0929 16:52:42.182632 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:52:42 crc kubenswrapper[4592]: E0929 16:52:42.182722 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:52:43 crc kubenswrapper[4592]: I0929 16:52:43.182498 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:43 crc kubenswrapper[4592]: E0929 16:52:43.183273 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
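Independently of the expired webhook certificate, the node stays NotReady because the runtime reports no CNI configuration file in /etc/kubernetes/cni/net.d/. The by-hand check is just a directory listing filtered to config files; a small Go sketch of that check follows. The confdir path is taken from the log; treating *.conf, *.conflist and *.json as usable configs mirrors common CNI conventions and is an assumption here.

// cniconf.go - report whether the kubelet's CNI confdir holds any config.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // confdir named in the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", confDir, err)
		return
	}
	found := 0
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // assumed usable extensions
			fmt.Println("found CNI config:", filepath.Join(confDir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration file in", confDir, "- network plugin not ready")
	}
}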
Sep 29 16:52:44 crc kubenswrapper[4592]: I0929 16:52:44.183027 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:52:44 crc kubenswrapper[4592]: E0929 16:52:44.183205 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:52:44 crc kubenswrapper[4592]: I0929 16:52:44.183047 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:52:44 crc kubenswrapper[4592]: E0929 16:52:44.183289 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:52:44 crc kubenswrapper[4592]: I0929 16:52:44.183027 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:52:44 crc kubenswrapper[4592]: E0929 16:52:44.183373 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
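The kube-multus restart recorded earlier in this excerpt shows the same dependency from the other side: the daemon started at 16:51:37, polled for the readiness indicator file, and exited at 16:52:22 with "timed out waiting for the condition". Multus itself uses apimachinery's PollImmediate for this wait; the stdlib-only sketch below reproduces the idea. The indicator path is taken from the log; the 1 s interval and 45 s timeout are read off the log timestamps and are assumptions.

// readinesspoll.go - wait for the CNI readiness indicator file to appear.
package main

import (
	"fmt"
	"os"
	"time"
)

func waitForFile(path string, interval, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for {
		if _, err := os.Stat(path); err == nil {
			return nil // indicator file exists: default network is ready
		}
		if time.Now().After(deadline) {
			return fmt.Errorf("timed out waiting for %s", path)
		}
		time.Sleep(interval)
	}
}

func main() {
	path := "/host/run/multus/cni/net.d/10-ovn-kubernetes.conf" // from the log
	if err := waitForFile(path, time.Second, 45*time.Second); err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
	fmt.Println("readiness indicator present:", path)
}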
Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.182811 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:45 crc kubenswrapper[4592]: E0929 16:52:45.183189 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Has your network provider started?"} Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.475511 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.475547 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.475559 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.475576 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.475586 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:45Z","lastTransitionTime":"2025-09-29T16:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.577690 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.577738 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.577747 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.577760 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.577769 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:45Z","lastTransitionTime":"2025-09-29T16:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.680025 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.680087 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.680110 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.680139 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.680196 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:45Z","lastTransitionTime":"2025-09-29T16:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.782882 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.782923 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.782931 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.782944 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.782954 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:45Z","lastTransitionTime":"2025-09-29T16:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.885263 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.885314 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.885326 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.885343 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.885358 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:45Z","lastTransitionTime":"2025-09-29T16:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.987194 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.987237 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.987248 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.987266 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:45 crc kubenswrapper[4592]: I0929 16:52:45.987279 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:45Z","lastTransitionTime":"2025-09-29T16:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.090724 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.090764 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.090775 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.090791 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.090802 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:46Z","lastTransitionTime":"2025-09-29T16:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.183041 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.183106 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.183084 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:46 crc kubenswrapper[4592]: E0929 16:52:46.183324 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:46 crc kubenswrapper[4592]: E0929 16:52:46.183392 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:46 crc kubenswrapper[4592]: E0929 16:52:46.183567 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.192946 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.193012 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.193036 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.193065 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.193089 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:46Z","lastTransitionTime":"2025-09-29T16:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.295607 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.295673 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.295691 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.295715 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.295733 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:46Z","lastTransitionTime":"2025-09-29T16:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.398514 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.398554 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.398570 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.398590 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.398604 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:46Z","lastTransitionTime":"2025-09-29T16:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.501137 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.501244 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.501263 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.501284 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.501301 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:46Z","lastTransitionTime":"2025-09-29T16:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.603675 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.603711 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.603722 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.603738 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.603751 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:46Z","lastTransitionTime":"2025-09-29T16:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.706101 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.706182 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.706194 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.706210 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.706221 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:46Z","lastTransitionTime":"2025-09-29T16:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.808702 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.808736 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.808750 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.808770 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.808786 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:46Z","lastTransitionTime":"2025-09-29T16:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.911283 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.911318 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.911329 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.911345 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:46 crc kubenswrapper[4592]: I0929 16:52:46.911360 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:46Z","lastTransitionTime":"2025-09-29T16:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.013798 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.013843 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.013858 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.013878 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.013892 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:47Z","lastTransitionTime":"2025-09-29T16:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.116985 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.117042 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.117053 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.117070 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.117083 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:47Z","lastTransitionTime":"2025-09-29T16:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.182373 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:47 crc kubenswrapper[4592]: E0929 16:52:47.182578 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.219342 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.219408 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.219421 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.219437 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.219448 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:47Z","lastTransitionTime":"2025-09-29T16:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.321711 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.321748 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.321756 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.321778 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.321787 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:47Z","lastTransitionTime":"2025-09-29T16:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.424972 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.425005 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.425014 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.425027 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.425036 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:47Z","lastTransitionTime":"2025-09-29T16:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.528100 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.528165 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.528178 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.528196 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.528210 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:47Z","lastTransitionTime":"2025-09-29T16:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.630420 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.630458 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.630467 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.630481 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.630503 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:47Z","lastTransitionTime":"2025-09-29T16:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.733580 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.733661 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.733684 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.733715 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.733738 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:47Z","lastTransitionTime":"2025-09-29T16:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.836612 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.836663 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.836678 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.836696 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.836711 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:47Z","lastTransitionTime":"2025-09-29T16:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.939423 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.939497 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.939519 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.939551 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:47 crc kubenswrapper[4592]: I0929 16:52:47.939572 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:47Z","lastTransitionTime":"2025-09-29T16:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.041770 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.041849 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.041868 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.041888 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.041904 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:48Z","lastTransitionTime":"2025-09-29T16:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.144569 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.144609 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.144619 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.144643 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.144654 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:48Z","lastTransitionTime":"2025-09-29T16:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.182136 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.182135 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:48 crc kubenswrapper[4592]: E0929 16:52:48.182277 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.182350 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:48 crc kubenswrapper[4592]: E0929 16:52:48.182488 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:48 crc kubenswrapper[4592]: E0929 16:52:48.182561 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.247234 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.247284 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.247301 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.247316 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.247327 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:48Z","lastTransitionTime":"2025-09-29T16:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.349831 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.349876 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.349895 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.349931 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.349942 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:48Z","lastTransitionTime":"2025-09-29T16:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.452566 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.452613 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.452629 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.452647 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.452660 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:48Z","lastTransitionTime":"2025-09-29T16:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.554760 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.554807 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.554817 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.554830 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.554841 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:48Z","lastTransitionTime":"2025-09-29T16:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.657883 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.657930 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.657944 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.657964 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.657981 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:48Z","lastTransitionTime":"2025-09-29T16:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.759927 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.759973 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.759985 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.760005 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.760019 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:48Z","lastTransitionTime":"2025-09-29T16:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.862854 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.862895 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.862904 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.862917 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.862929 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:48Z","lastTransitionTime":"2025-09-29T16:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.965778 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.965824 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.965835 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.965857 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:48 crc kubenswrapper[4592]: I0929 16:52:48.965873 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:48Z","lastTransitionTime":"2025-09-29T16:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.067828 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.067871 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.067886 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.067903 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.067913 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.170468 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.170504 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.170512 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.170528 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.170537 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.182789 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:49 crc kubenswrapper[4592]: E0929 16:52:49.182932 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.273671 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.273708 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.273715 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.273730 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.273740 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.376503 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.376562 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.376579 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.376602 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.376618 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.479157 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.479203 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.479213 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.479228 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.479241 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.585822 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.585848 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.585857 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.585868 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.585877 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.687817 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.687876 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.687898 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.687921 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.687936 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.790081 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.790121 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.790136 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.790177 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.790188 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.846643 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.846695 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.846708 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.846731 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.846742 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: E0929 16:52:49.862231 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.865055 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.865101 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.865114 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.865130 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.865141 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: E0929 16:52:49.876877 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.880007 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.880038 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.880047 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.880063 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.880074 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: E0929 16:52:49.892363 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.895902 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.895980 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.895992 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.896005 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.896014 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: E0929 16:52:49.907539 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.911865 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.911905 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.911914 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.911929 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.911938 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:49 crc kubenswrapper[4592]: E0929 16:52:49.924438 4592 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148064Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608864Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a4cccf40-d865-472b-8b1e-2c9ff60e2cb2\\\",\\\"systemUUID\\\":\\\"a7270dda-1e73-4054-97c0-7b6ca81df4ee\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:49Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:49 crc kubenswrapper[4592]: E0929 16:52:49.924596 4592 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.926044 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.926077 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.926088 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.926103 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:49 crc kubenswrapper[4592]: I0929 16:52:49.926114 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:49Z","lastTransitionTime":"2025-09-29T16:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.028410 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.028461 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.028473 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.028488 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.028499 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:50Z","lastTransitionTime":"2025-09-29T16:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.130584 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.130696 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.130710 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.130729 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.130741 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:50Z","lastTransitionTime":"2025-09-29T16:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.182890 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.182916 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.183018 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:50 crc kubenswrapper[4592]: E0929 16:52:50.183193 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:50 crc kubenswrapper[4592]: E0929 16:52:50.183327 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:50 crc kubenswrapper[4592]: E0929 16:52:50.183416 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.232646 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.232687 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.232698 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.232715 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.232726 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:50Z","lastTransitionTime":"2025-09-29T16:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
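
The repeating heartbeat above reduces to a single cause: the kubelet finds no CNI configuration under /etc/kubernetes/cni/net.d/, so NetworkReady stays false, the node is held NotReady, and every pod sandbox start is skipped. A minimal sketch of that readiness test, assuming only the directory path quoted in the log (the real check is done by the CRI runtime through libcni, not by a script like this):

    # Sketch: approximate the "is CNI configured yet?" test behind the
    # NetworkReady=false condition. Only the path comes from the log above;
    # the extension list mirrors what libcni accepts.
    import os

    CNI_CONF_DIR = "/etc/kubernetes/cni/net.d/"

    def cni_configured(conf_dir: str = CNI_CONF_DIR) -> bool:
        """True once the network provider has written at least one config."""
        try:
            return any(name.endswith((".conf", ".conflist", ".json"))
                       for name in os.listdir(conf_dir))
        except FileNotFoundError:
            return False

    if not cni_configured():
        print("NetworkReady=false: no CNI configuration file in", CNI_CONF_DIR)

Once the network provider writes its config (OVN-Kubernetes deployed through multus in these entries, e.g. the 10-ovn-kubernetes.conf referenced later in this log), the condition clears on the next sync loop.
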
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.335311 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.335568 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.335585 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.335608 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.335625 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:50Z","lastTransitionTime":"2025-09-29T16:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.437559 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.437614 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.437632 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.437653 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.437681 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:50Z","lastTransitionTime":"2025-09-29T16:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.539772 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.539831 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.539848 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.539871 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.539890 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:50Z","lastTransitionTime":"2025-09-29T16:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.642850 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.642931 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.642945 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.642965 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.642980 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:50Z","lastTransitionTime":"2025-09-29T16:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.746019 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.746048 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.746056 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.746068 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.746076 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:50Z","lastTransitionTime":"2025-09-29T16:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.848943 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.849005 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.849018 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.849034 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.849044 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:50Z","lastTransitionTime":"2025-09-29T16:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.952305 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.952376 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.952389 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.952407 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:50 crc kubenswrapper[4592]: I0929 16:52:50.952420 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:50Z","lastTransitionTime":"2025-09-29T16:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.054896 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.054949 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.054962 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.054979 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.054992 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:51Z","lastTransitionTime":"2025-09-29T16:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.126806 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:51 crc kubenswrapper[4592]: E0929 16:52:51.127201 4592 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 16:52:51 crc kubenswrapper[4592]: E0929 16:52:51.127337 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs podName:484e63f2-7bae-4e57-ab79-95cba3bad285 nodeName:}" failed. No retries permitted until 2025-09-29 16:53:55.127315829 +0000 UTC m=+165.275093510 (durationBeforeRetry 1m4s).
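
The "durationBeforeRetry 1m4s" above is the kubelet's exponential backoff for failed volume operations: each retry of the metrics-certs mount waits twice as long as the last. A sketch of the schedule, assuming the upstream defaults of a 0.5s initial delay, factor 2, and a cap of about 2m2s (constants recalled from the kubelet's exponential-backoff helper, not read from this log; the observed 64s sits exactly on the doubling sequence):

    # Sketch: kubelet-style exponential backoff for retried volume mounts.
    # Assumed constants (not from this log): 0.5s initial, factor 2, ~122s cap.
    def backoff_schedule(initial=0.5, factor=2.0, cap=122.0, attempts=10):
        """Yield the delay in seconds before each successive retry."""
        delay = initial
        for _ in range(attempts):
            yield min(delay, cap)
            delay *= factor

    print(list(backoff_schedule()))
    # -> [0.5, 1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 64.0, 122.0, 122.0]
    # The eighth delay, 64s, matches the 1m4s recorded above.

No delay makes the mount succeed on its own, though: the operation keeps failing until the referenced metrics-daemon-secret is actually registered with the kubelet, which is the error the same entry finishes with below.
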
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs") pod "network-metrics-daemon-qvsjc" (UID: "484e63f2-7bae-4e57-ab79-95cba3bad285") : object "openshift-multus"/"metrics-daemon-secret" not registered
Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.157551 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.157804 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.157876 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.157963 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.158036 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:51Z","lastTransitionTime":"2025-09-29T16:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.183373 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:51 crc kubenswrapper[4592]: E0929 16:52:51.183526 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.199049 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-gbbtb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:22Z\\\",\\\"message\\\":\\\"2025-09-29T16:51:36+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542\\\\n2025-09-29T16:51:36+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_00228b42-4033-4628-9402-38cd1a73d542 to /host/opt/cni/bin/\\\\n2025-09-29T16:51:37Z [verbose] multus-daemon started\\\\n2025-09-29T16:51:37Z [verbose] Readiness Indicator file check\\\\n2025-09-29T16:52:22Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:52:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4mxdv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-gbbtb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.218644 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58235808-6fc6-4723-84e4-59f2d38319f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f92dd210b331f84c953447fd76ec13be27ec379ce30025e22afbd02f74d82adf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://395c250933a6ea8cbbe9edd6e94b00bc6ea702271afa49c5389bf6c2e028196b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19c18630a02bca96b9c0ffed526eacb4f788ca6e7d18ba399e911ce5c9705429\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://613aa31eda149ea545996f31b82fc43504b3d81a54f3145080e0cc0bd27adf40\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d2a3395d07588c76b6f0d5f1c614c6fe0a440c3635f6f8844f819254663cf388\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e85ef8810cb7b0919708e5cd0f5d0de5f16683ae108aee46dff8bc66e944c689\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://636f8a1a94d4f224f50bb520e63b55f7bdf5c6ba70a04b9478212c329e34de81\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n5bgk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-n7rcv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.237802 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b151d75f-29cf-4412-9252-6cf067c29b75\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c626916e89c60a1089ed6f1ab41167d76690cf60c244950a33161005c2c1045c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://910220e33f4ea4e52d24e419e847fd0a008ef93b42759b876b599a9ec523e012\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d022815100067a0f62a9ad230cdd4e9aef59946fe0cebc661d0a13884eaadde5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbb1cb45cf9d23f15147e3aad9fe4889a2bb405
32ece918719f3031b6097e66b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://034a2b9e94fcce9479a231a578e91f3a8e4cafec8e7b8ece2fa1aabc7063bef9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d788e7ecffd4fec7a649dea7ffc449892e9211e74ca2205abfccf7e317125a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d788e7ecffd4fec7a649dea7ffc449892e9211e74ca2205abfccf7e317125a0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d56413fa0f124c9c8c73c54bb11be950d4cf0ea2d7aa147a1d44e3ca8fd9d826\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56413fa0f124c9c8c73c54bb11be950d4cf0ea2d7aa147a1d44e3ca8fd9d826\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://6f5e57a99b51370c3625acbaac2f95c9d5aca8f3dd5a28c757416203f51acd76\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6f5e57a99b51370c3625acbaac2f95c9d5aca8f3dd5a28c757416203f51acd76\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.253568 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cffbd2ee-077c-4125-802a-10de16d314e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:52:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84e88cad0ac4de42c50a4b2895558f24e841a816af164694a6b7587400cdf7f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d1681e5c56a49343bb82cd806473236
d179f256e8f8a8a66266706f1fd25b42\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f54d0bd2023c2fc530238fcda9a35b7dcec0cba65e542d325e04e04c939c63\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4d8926de325bf427ae41a8e8a67fd978d5d8ebb6c8b387c28699d59e308657f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c5db0c2fed40d2a981b4fcb2c32c6c37ad13d10a48e26109095bb254ee25ec9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"le observer\\\\nW0929 16:51:31.755988 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0929 16:51:31.756118 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0929 16:51:31.756864 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3152082402/tls.crt::/tmp/serving-cert-3152082402/tls.key\\\\\\\"\\\\nI0929 16:51:32.047990 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0929 16:51:32.057621 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0929 16:51:32.057643 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0929 16:51:32.057664 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0929 16:51:32.057668 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0929 16:51:32.075319 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0929 16:51:32.075351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075355 1 secure_serving.go:69] Use of 
insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0929 16:51:32.075359 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0929 16:51:32.075362 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0929 16:51:32.075365 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0929 16:51:32.076814 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0929 16:51:32.076656 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0929 16:51:32.076785 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:16Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5138283a23ecff9d68a40f64155fa86bcdfa03df2f75c88ff67e29a908254d9e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:14Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f42f1f17eeab3b93ba5eca31d04a8f12dcd9fc10e5357f21ecbbded00cf3698b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.260478 4592 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.260517 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.260527 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.260541 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.260551 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:51Z","lastTransitionTime":"2025-09-29T16:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.266123 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0288fc89-0762-48ee-8963-ea2cff1158bc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37c2a200f2c801d7aeb3cce68e10ef90693c3fe4d6d7cae23b9bf1c892a983d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf23a7bbcc8d2fcfe91de54fd0bc396954744182637ab5c79c1fd52d89fd369a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8540c8976d29eb6686964c8233305eee289c3290990aa5867bbc6df6d0ad5a15\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24c7ad73751c5dd84634dd20e56d125fd7cc3e24f2f516dad61d31a041282dbf\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.278735 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.293230 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ff62232e653e4ffa9a8bbbcc5bf9bd02a98b4d89227111b01227e27e3a5e6a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://12c4885f4f31a49731d82de797e3ed69874377c9bf7fdcb8dbdfd54f9cc7f010\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.310126 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b22efd65-426d-4220-9e18-5a84827be8ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-09-29T16:52:35Z\\\",\\\"message\\\":\\\"zation, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:35Z is after 2025-08-24T17:21:41Z]\\\\nI0929 16:52:35.072098 6596 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication/oauth-openshift_TCP_cluster\\\\\\\", UUID:\\\\\\\"c0c2f725-e461-454e-a88c-c8350d62e1ef\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication/oauth-openshift\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-authentication/oauth-openshift_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[stri\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-09-29T16:52:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-95m58\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-47pt5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.318904 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"77dea16c-6cbf-4f00-86c9-498bb0dfc946\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6bbde651cf91895d2be9af42857fee9880a7b7a9ccc270f86e07499848b23426\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d1091e1593164ad15c8a399e57e348e317afe114a957b
81c9611fef33f54fe4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d1091e1593164ad15c8a399e57e348e317afe114a957b81c9611fef33f54fe4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.328665 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.339998 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:32Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.348519 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-b9sgl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f366b299-488d-4b75-8df9-591e502330c8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fafb80e953d59618f89e6c24de1eaf083efa50e457b8dbe5749db8a30c0970a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fprqh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:36Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-b9sgl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.358675 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64794b03-cbe6-4a8e-8502-f2291c53b986\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6989bbab9bb4b87407263652d78ae04ec341fd26859b132d64ba634f25a16eff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e42e23bf55b40024a2a7e2c3fc90f8662a6f987fba0680c686023bc321978df5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmwwn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:45Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mrgtz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.362823 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.362847 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.362856 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.362870 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.362880 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:51Z","lastTransitionTime":"2025-09-29T16:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.371166 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69747356-8efa-4a26-b207-42650a5f130f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7fc9003a62047ffa596709f221cf59141e220a23cee06465fd1b5b61d22d83c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f16411450cb04591a82667697fca21ff112c579742677fa2ea4a5aba95b5065\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b36a884fc04acbf89674f3948f453c869e1c4cad7ca46c5ef381bd0405272545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://02708bf5e99edba993ff787b2f2848dcd54ef116566063300c9b0b68f9ce2c4d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-09-29T16:51:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-09-29T16:51:12Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:11Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.387695 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:34Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bbdeda04cb3ca090f73ef4c24ddda3d6a31af09590e354a13c4fe0299ba6182b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 
2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.397256 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4cc986fa-6620-43ff-ae05-11c71e326035\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://63d6630d3f76690b295d01ed84d463acc9793e3b4e7538965c8efa326b703be1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-h8ggf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dfqzg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.406851 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://df8bb230ce3c422738d553bd7ac7dccdb3246e41986fa8ac50982dcc800e7b2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.415781 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-k5ts8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2f9a55e9-2c59-4873-a10c-74f3f529aa72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f0dff127353e1c8a12d10f34c374b68f69feb27710615f0250f694edc9257f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-09-29T16:51:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k9wcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:33Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-k5ts8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.425352 4592 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"484e63f2-7bae-4e57-ab79-95cba3bad285\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-09-29T16:51:47Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4tsp9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-09-29T16:51:47Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-qvsjc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-09-29T16:52:51Z is after 2025-08-24T17:21:41Z" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.465338 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.465373 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.465381 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.465394 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.465404 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:51Z","lastTransitionTime":"2025-09-29T16:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.568117 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.568181 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.568196 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.568213 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.568225 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:51Z","lastTransitionTime":"2025-09-29T16:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.671180 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.671210 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.671218 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.671232 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.671242 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:51Z","lastTransitionTime":"2025-09-29T16:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.773584 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.773625 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.773634 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.773650 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.773663 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:51Z","lastTransitionTime":"2025-09-29T16:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.877646 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.877703 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.877720 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.877741 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.877759 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:51Z","lastTransitionTime":"2025-09-29T16:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.980758 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.980850 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.980869 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.980900 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:51 crc kubenswrapper[4592]: I0929 16:52:51.980920 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:51Z","lastTransitionTime":"2025-09-29T16:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.083008 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.083055 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.083066 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.083089 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.083102 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:52Z","lastTransitionTime":"2025-09-29T16:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.182279 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:52 crc kubenswrapper[4592]: E0929 16:52:52.182430 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.182442 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.182506 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:52 crc kubenswrapper[4592]: E0929 16:52:52.182842 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:52 crc kubenswrapper[4592]: E0929 16:52:52.182950 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.183138 4592 scope.go:117] "RemoveContainer" containerID="a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64" Sep 29 16:52:52 crc kubenswrapper[4592]: E0929 16:52:52.183407 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\"" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.184833 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.184875 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.184888 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.184903 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.184916 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:52Z","lastTransitionTime":"2025-09-29T16:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.291276 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.291308 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.291319 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.291332 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.291344 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:52Z","lastTransitionTime":"2025-09-29T16:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.393731 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.394426 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.394560 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.394660 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.394752 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:52Z","lastTransitionTime":"2025-09-29T16:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.497640 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.497701 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.497717 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.497738 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.497756 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:52Z","lastTransitionTime":"2025-09-29T16:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.600077 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.600112 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.600121 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.600135 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.600165 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:52Z","lastTransitionTime":"2025-09-29T16:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.702534 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.702574 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.702583 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.702597 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.702606 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:52Z","lastTransitionTime":"2025-09-29T16:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.804337 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.804380 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.804389 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.804401 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.804410 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:52Z","lastTransitionTime":"2025-09-29T16:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.906426 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.906658 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.906740 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.906840 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:52 crc kubenswrapper[4592]: I0929 16:52:52.906918 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:52Z","lastTransitionTime":"2025-09-29T16:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.009302 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.009356 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.009368 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.009388 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.009401 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:53Z","lastTransitionTime":"2025-09-29T16:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.111951 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.112291 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.112365 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.112438 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.112499 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:53Z","lastTransitionTime":"2025-09-29T16:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.183074 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:52:53 crc kubenswrapper[4592]: E0929 16:52:53.183236 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.214925 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.214952 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.214962 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.214977 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.214986 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:53Z","lastTransitionTime":"2025-09-29T16:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.316349 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.316384 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.316393 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.316405 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.316417 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:53Z","lastTransitionTime":"2025-09-29T16:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.418737 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.418799 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.418815 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.418840 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.418857 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:53Z","lastTransitionTime":"2025-09-29T16:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.521548 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.521655 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.521670 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.521688 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.521700 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:53Z","lastTransitionTime":"2025-09-29T16:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.623505 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.623557 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.623573 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.623593 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.623604 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:53Z","lastTransitionTime":"2025-09-29T16:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.725764 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.725817 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.725836 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.725851 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.725861 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:53Z","lastTransitionTime":"2025-09-29T16:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.827969 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.828010 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.828020 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.828033 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.828043 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:53Z","lastTransitionTime":"2025-09-29T16:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.930670 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.930724 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.930735 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.930757 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:53 crc kubenswrapper[4592]: I0929 16:52:53.930774 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:53Z","lastTransitionTime":"2025-09-29T16:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.033137 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.033175 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.033183 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.033196 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.033205 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:54Z","lastTransitionTime":"2025-09-29T16:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.136074 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.136114 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.136126 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.136174 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.136188 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:54Z","lastTransitionTime":"2025-09-29T16:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.182762 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.182762 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.182745 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:52:54 crc kubenswrapper[4592]: E0929 16:52:54.183059 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:52:54 crc kubenswrapper[4592]: E0929 16:52:54.183120 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:52:54 crc kubenswrapper[4592]: E0929 16:52:54.182913 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.239091 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.239166 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.239183 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.239202 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.239214 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:54Z","lastTransitionTime":"2025-09-29T16:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.341030 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.341065 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.341077 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.341092 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.341102 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:54Z","lastTransitionTime":"2025-09-29T16:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.443467 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.443505 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.443525 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.443562 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.443573 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:54Z","lastTransitionTime":"2025-09-29T16:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.546360 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.546420 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.546439 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.546461 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.546479 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:54Z","lastTransitionTime":"2025-09-29T16:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.648409 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.648448 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.648462 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.648480 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.648520 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:54Z","lastTransitionTime":"2025-09-29T16:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.751106 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.751202 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.751226 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.751252 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.751272 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:54Z","lastTransitionTime":"2025-09-29T16:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.854565 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.854599 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.854608 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.854655 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.854675 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:54Z","lastTransitionTime":"2025-09-29T16:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.957630 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.957714 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.957728 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.957744 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:54 crc kubenswrapper[4592]: I0929 16:52:54.957755 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:54Z","lastTransitionTime":"2025-09-29T16:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Sep 29 16:52:55 crc kubenswrapper[4592]: I0929 16:52:55.182692 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:55 crc kubenswrapper[4592]: E0929 16:52:55.182856 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Sep 29 16:52:56 crc kubenswrapper[4592]: I0929 16:52:56.182933 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:52:56 crc kubenswrapper[4592]: I0929 16:52:56.182959 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:52:56 crc kubenswrapper[4592]: E0929 16:52:56.183219 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:52:56 crc kubenswrapper[4592]: I0929 16:52:56.182982 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:52:56 crc kubenswrapper[4592]: E0929 16:52:56.183319 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:52:56 crc kubenswrapper[4592]: E0929 16:52:56.183497 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:52:57 crc kubenswrapper[4592]: I0929 16:52:57.182896 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:57 crc kubenswrapper[4592]: E0929 16:52:57.183058 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Sep 29 16:52:58 crc kubenswrapper[4592]: I0929 16:52:58.182350 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:52:58 crc kubenswrapper[4592]: I0929 16:52:58.182368 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:52:58 crc kubenswrapper[4592]: I0929 16:52:58.182365 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:52:58 crc kubenswrapper[4592]: E0929 16:52:58.182512 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:52:58 crc kubenswrapper[4592]: E0929 16:52:58.182561 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:52:58 crc kubenswrapper[4592]: E0929 16:52:58.182626 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.183095 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:52:59 crc kubenswrapper[4592]: E0929 16:52:59.183244 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Has your network provider started?"} Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.486659 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.486705 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.486716 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.486732 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.486743 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:59Z","lastTransitionTime":"2025-09-29T16:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.589421 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.589462 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.589472 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.589489 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.589500 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:59Z","lastTransitionTime":"2025-09-29T16:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.691790 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.691828 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.691836 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.691850 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.691859 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:59Z","lastTransitionTime":"2025-09-29T16:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.793434 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.793518 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.793530 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.793547 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.793557 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:59Z","lastTransitionTime":"2025-09-29T16:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.897056 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.897103 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.897116 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.897135 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.897169 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:59Z","lastTransitionTime":"2025-09-29T16:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.999692 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.999731 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.999745 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:52:59 crc kubenswrapper[4592]: I0929 16:52:59.999760 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:52:59.999771 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:52:59Z","lastTransitionTime":"2025-09-29T16:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.101607 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.101665 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.101679 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.101699 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.101711 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:53:00Z","lastTransitionTime":"2025-09-29T16:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.182092 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.182092 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.182240 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:53:00 crc kubenswrapper[4592]: E0929 16:53:00.182341 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:53:00 crc kubenswrapper[4592]: E0929 16:53:00.182471 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:53:00 crc kubenswrapper[4592]: E0929 16:53:00.182681 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.204051 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.204106 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.204119 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.204137 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.204171 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:53:00Z","lastTransitionTime":"2025-09-29T16:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.214723 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.214812 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.214822 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.214837 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.214846 4592 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-09-29T16:53:00Z","lastTransitionTime":"2025-09-29T16:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.271334 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw"] Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.273234 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.276590 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.277944 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.278133 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.278268 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.331956 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.332004 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.332049 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.332092 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-service-ca\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.332138 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.369246 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=22.369228314 podStartE2EDuration="22.369228314s" podCreationTimestamp="2025-09-29 16:52:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-09-29 16:53:00.368432851 +0000 UTC m=+110.516210532" watchObservedRunningTime="2025-09-29 16:53:00.369228314 +0000 UTC m=+110.517005995" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.399385 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podStartSLOduration=89.399367126 podStartE2EDuration="1m29.399367126s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:00.399266033 +0000 UTC m=+110.547043724" watchObservedRunningTime="2025-09-29 16:53:00.399367126 +0000 UTC m=+110.547144807" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.428101 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-b9sgl" podStartSLOduration=89.428082575 podStartE2EDuration="1m29.428082575s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:00.416529009 +0000 UTC m=+110.564306690" watchObservedRunningTime="2025-09-29 16:53:00.428082575 +0000 UTC m=+110.575860266" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.433010 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.433054 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.433083 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.433111 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-service-ca\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.433127 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.433171 4592 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.433214 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.433889 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-service-ca\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.438976 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.462540 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-kf4xw\" (UID: \"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.467575 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=62.467558305 podStartE2EDuration="1m2.467558305s" podCreationTimestamp="2025-09-29 16:51:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:00.467227985 +0000 UTC m=+110.615005676" watchObservedRunningTime="2025-09-29 16:53:00.467558305 +0000 UTC m=+110.615335986" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.467708 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mrgtz" podStartSLOduration=88.467704619 podStartE2EDuration="1m28.467704619s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:00.42792437 +0000 UTC m=+110.575702061" watchObservedRunningTime="2025-09-29 16:53:00.467704619 +0000 UTC m=+110.615482300" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.552318 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-k5ts8" podStartSLOduration=89.552296499 podStartE2EDuration="1m29.552296499s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:00.55198 +0000 UTC m=+110.699757691" watchObservedRunningTime="2025-09-29 16:53:00.552296499 +0000 UTC m=+110.700074190" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.586721 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=88.586695639 podStartE2EDuration="1m28.586695639s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:00.568319719 +0000 UTC m=+110.716097410" watchObservedRunningTime="2025-09-29 16:53:00.586695639 +0000 UTC m=+110.734473340" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.587106 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-gbbtb" podStartSLOduration=89.587098661 podStartE2EDuration="1m29.587098661s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:00.586585724 +0000 UTC m=+110.734363405" watchObservedRunningTime="2025-09-29 16:53:00.587098661 +0000 UTC m=+110.734876342" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.596390 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.648906 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-n7rcv" podStartSLOduration=89.648890529 podStartE2EDuration="1m29.648890529s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:00.61151184 +0000 UTC m=+110.759289521" watchObservedRunningTime="2025-09-29 16:53:00.648890529 +0000 UTC m=+110.796668210" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.666075 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=27.666059182 podStartE2EDuration="27.666059182s" podCreationTimestamp="2025-09-29 16:52:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:00.649681442 +0000 UTC m=+110.797459143" watchObservedRunningTime="2025-09-29 16:53:00.666059182 +0000 UTC m=+110.813836863" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.666347 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=88.66634437 podStartE2EDuration="1m28.66634437s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:00.666036131 +0000 UTC m=+110.813813812" watchObservedRunningTime="2025-09-29 16:53:00.66634437 +0000 UTC m=+110.814122051" Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.754043 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" 
event={"ID":"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4","Type":"ContainerStarted","Data":"50d4e52a5239d1a03378f3d3b5531ea20c4ac651556ee759348b4dc36d81cb2f"} Sep 29 16:53:00 crc kubenswrapper[4592]: I0929 16:53:00.754093 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" event={"ID":"1f27d4cf-03a8-4b02-8b7f-db2cfeb5b2d4","Type":"ContainerStarted","Data":"9e9791b7fb6e44fbf25554945984e319dab7f2892007e70b0417dedf71772db6"} Sep 29 16:53:01 crc kubenswrapper[4592]: I0929 16:53:01.182213 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:53:01 crc kubenswrapper[4592]: E0929 16:53:01.183074 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:53:02 crc kubenswrapper[4592]: I0929 16:53:02.182423 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:53:02 crc kubenswrapper[4592]: I0929 16:53:02.182441 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:53:02 crc kubenswrapper[4592]: E0929 16:53:02.182544 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:53:02 crc kubenswrapper[4592]: I0929 16:53:02.182604 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:53:02 crc kubenswrapper[4592]: E0929 16:53:02.182669 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:53:02 crc kubenswrapper[4592]: E0929 16:53:02.182710 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:53:03 crc kubenswrapper[4592]: I0929 16:53:03.182316 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:53:03 crc kubenswrapper[4592]: E0929 16:53:03.182522 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:53:04 crc kubenswrapper[4592]: I0929 16:53:04.182719 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:53:04 crc kubenswrapper[4592]: I0929 16:53:04.182714 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:53:04 crc kubenswrapper[4592]: I0929 16:53:04.183475 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:53:04 crc kubenswrapper[4592]: E0929 16:53:04.183903 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:53:04 crc kubenswrapper[4592]: E0929 16:53:04.183707 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:53:04 crc kubenswrapper[4592]: E0929 16:53:04.184175 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:53:05 crc kubenswrapper[4592]: I0929 16:53:05.182353 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:53:05 crc kubenswrapper[4592]: E0929 16:53:05.182508 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:53:06 crc kubenswrapper[4592]: I0929 16:53:06.182128 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:53:06 crc kubenswrapper[4592]: E0929 16:53:06.182260 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:53:06 crc kubenswrapper[4592]: I0929 16:53:06.182129 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:53:06 crc kubenswrapper[4592]: E0929 16:53:06.182333 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:53:06 crc kubenswrapper[4592]: I0929 16:53:06.182128 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:53:06 crc kubenswrapper[4592]: E0929 16:53:06.182393 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:53:07 crc kubenswrapper[4592]: I0929 16:53:07.183197 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:53:07 crc kubenswrapper[4592]: I0929 16:53:07.183579 4592 scope.go:117] "RemoveContainer" containerID="a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64" Sep 29 16:53:07 crc kubenswrapper[4592]: E0929 16:53:07.184466 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-47pt5_openshift-ovn-kubernetes(b22efd65-426d-4220-9e18-5a84827be8ac)\"" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" Sep 29 16:53:07 crc kubenswrapper[4592]: E0929 16:53:07.184625 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:53:08 crc kubenswrapper[4592]: I0929 16:53:08.182017 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:53:08 crc kubenswrapper[4592]: I0929 16:53:08.182426 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:53:08 crc kubenswrapper[4592]: I0929 16:53:08.182447 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:53:08 crc kubenswrapper[4592]: E0929 16:53:08.182472 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:53:08 crc kubenswrapper[4592]: E0929 16:53:08.182567 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:53:08 crc kubenswrapper[4592]: E0929 16:53:08.182720 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:53:08 crc kubenswrapper[4592]: I0929 16:53:08.779712 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbbtb_2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89/kube-multus/1.log" Sep 29 16:53:08 crc kubenswrapper[4592]: I0929 16:53:08.780632 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbbtb_2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89/kube-multus/0.log" Sep 29 16:53:08 crc kubenswrapper[4592]: I0929 16:53:08.780710 4592 generic.go:334] "Generic (PLEG): container finished" podID="2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89" containerID="635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be" exitCode=1 Sep 29 16:53:08 crc kubenswrapper[4592]: I0929 16:53:08.780746 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbbtb" event={"ID":"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89","Type":"ContainerDied","Data":"635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be"} Sep 29 16:53:08 crc kubenswrapper[4592]: I0929 16:53:08.780781 4592 scope.go:117] "RemoveContainer" containerID="8e3890223f0189f9ac595318b6b37a6815a2f5c5d7a842f34645945d655877fe" Sep 29 16:53:08 crc kubenswrapper[4592]: I0929 16:53:08.781191 4592 scope.go:117] "RemoveContainer" containerID="635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be" Sep 29 16:53:08 crc kubenswrapper[4592]: E0929 16:53:08.781375 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-gbbtb_openshift-multus(2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89)\"" pod="openshift-multus/multus-gbbtb" podUID="2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89" Sep 29 16:53:08 crc kubenswrapper[4592]: I0929 
16:53:08.797882 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kf4xw" podStartSLOduration=97.797852857 podStartE2EDuration="1m37.797852857s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:00.767885117 +0000 UTC m=+110.915662798" watchObservedRunningTime="2025-09-29 16:53:08.797852857 +0000 UTC m=+118.945630578" Sep 29 16:53:09 crc kubenswrapper[4592]: I0929 16:53:09.182753 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:53:09 crc kubenswrapper[4592]: E0929 16:53:09.182885 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:53:09 crc kubenswrapper[4592]: I0929 16:53:09.785745 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbbtb_2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89/kube-multus/1.log" Sep 29 16:53:10 crc kubenswrapper[4592]: I0929 16:53:10.182747 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:53:10 crc kubenswrapper[4592]: I0929 16:53:10.182781 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:53:10 crc kubenswrapper[4592]: E0929 16:53:10.182880 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:53:10 crc kubenswrapper[4592]: I0929 16:53:10.182963 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:53:10 crc kubenswrapper[4592]: E0929 16:53:10.183029 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:53:10 crc kubenswrapper[4592]: E0929 16:53:10.183230 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:53:11 crc kubenswrapper[4592]: E0929 16:53:11.117987 4592 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Sep 29 16:53:11 crc kubenswrapper[4592]: I0929 16:53:11.182590 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:53:11 crc kubenswrapper[4592]: E0929 16:53:11.183762 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:53:11 crc kubenswrapper[4592]: E0929 16:53:11.381035 4592 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Sep 29 16:53:12 crc kubenswrapper[4592]: I0929 16:53:12.182036 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:53:12 crc kubenswrapper[4592]: E0929 16:53:12.182369 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:53:12 crc kubenswrapper[4592]: I0929 16:53:12.182467 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:53:12 crc kubenswrapper[4592]: E0929 16:53:12.182762 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:53:12 crc kubenswrapper[4592]: I0929 16:53:12.182105 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:53:12 crc kubenswrapper[4592]: E0929 16:53:12.183706 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:53:13 crc kubenswrapper[4592]: I0929 16:53:13.182549 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:53:13 crc kubenswrapper[4592]: E0929 16:53:13.182690 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:53:14 crc kubenswrapper[4592]: I0929 16:53:14.183031 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:53:14 crc kubenswrapper[4592]: I0929 16:53:14.183090 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Sep 29 16:53:14 crc kubenswrapper[4592]: I0929 16:53:14.183090 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Sep 29 16:53:14 crc kubenswrapper[4592]: E0929 16:53:14.183211 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Sep 29 16:53:14 crc kubenswrapper[4592]: E0929 16:53:14.183326 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Sep 29 16:53:14 crc kubenswrapper[4592]: E0929 16:53:14.183419 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Sep 29 16:53:15 crc kubenswrapper[4592]: I0929 16:53:15.182688 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:53:15 crc kubenswrapper[4592]: E0929 16:53:15.182834 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285" Sep 29 16:53:16 crc kubenswrapper[4592]: I0929 16:53:16.182817 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:53:16 crc kubenswrapper[4592]: I0929 16:53:16.182867 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:53:16 crc kubenswrapper[4592]: I0929 16:53:16.182875 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:53:16 crc kubenswrapper[4592]: E0929 16:53:16.184186 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:53:16 crc kubenswrapper[4592]: E0929 16:53:16.184286 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:53:16 crc kubenswrapper[4592]: E0929 16:53:16.184365 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:53:16 crc kubenswrapper[4592]: E0929 16:53:16.382804 4592 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Sep 29 16:53:17 crc kubenswrapper[4592]: I0929 16:53:17.182429 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:53:17 crc kubenswrapper[4592]: E0929 16:53:17.182683 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Sep 29 16:53:18 crc kubenswrapper[4592]: I0929 16:53:18.182706 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:53:18 crc kubenswrapper[4592]: E0929 16:53:18.182832 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:53:18 crc kubenswrapper[4592]: I0929 16:53:18.182718 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:53:18 crc kubenswrapper[4592]: E0929 16:53:18.182904 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:53:18 crc kubenswrapper[4592]: I0929 16:53:18.182706 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:53:18 crc kubenswrapper[4592]: E0929 16:53:18.182946 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:53:19 crc kubenswrapper[4592]: I0929 16:53:19.183143 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:53:19 crc kubenswrapper[4592]: E0929 16:53:19.183334 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Sep 29 16:53:20 crc kubenswrapper[4592]: I0929 16:53:20.182046 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:53:20 crc kubenswrapper[4592]: I0929 16:53:20.182104 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:53:20 crc kubenswrapper[4592]: E0929 16:53:20.182230 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:53:20 crc kubenswrapper[4592]: E0929 16:53:20.182344 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:53:20 crc kubenswrapper[4592]: I0929 16:53:20.182074 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:53:20 crc kubenswrapper[4592]: E0929 16:53:20.182851 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:53:21 crc kubenswrapper[4592]: I0929 16:53:21.183005 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:53:21 crc kubenswrapper[4592]: E0929 16:53:21.184022 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Sep 29 16:53:21 crc kubenswrapper[4592]: E0929 16:53:21.383290 4592 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Sep 29 16:53:22 crc kubenswrapper[4592]: I0929 16:53:22.182971 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:53:22 crc kubenswrapper[4592]: I0929 16:53:22.183015 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:53:22 crc kubenswrapper[4592]: I0929 16:53:22.183034 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:53:22 crc kubenswrapper[4592]: E0929 16:53:22.183107 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:53:22 crc kubenswrapper[4592]: E0929 16:53:22.183240 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:53:22 crc kubenswrapper[4592]: E0929 16:53:22.183599 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:53:22 crc kubenswrapper[4592]: I0929 16:53:22.183886 4592 scope.go:117] "RemoveContainer" containerID="a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64"
Sep 29 16:53:22 crc kubenswrapper[4592]: I0929 16:53:22.824955 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/3.log"
Sep 29 16:53:22 crc kubenswrapper[4592]: I0929 16:53:22.827959 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerStarted","Data":"36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96"}
Sep 29 16:53:22 crc kubenswrapper[4592]: I0929 16:53:22.828298 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5"
Sep 29 16:53:22 crc kubenswrapper[4592]: I0929 16:53:22.851370 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podStartSLOduration=110.851353109 podStartE2EDuration="1m50.851353109s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:22.851220055 +0000 UTC m=+132.998997746" watchObservedRunningTime="2025-09-29 16:53:22.851353109 +0000 UTC m=+132.999130790"
Sep 29 16:53:23 crc kubenswrapper[4592]: I0929 16:53:23.024346 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-qvsjc"]
Sep 29 16:53:23 crc kubenswrapper[4592]: I0929 16:53:23.024455 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:53:23 crc kubenswrapper[4592]: E0929 16:53:23.024571 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Sep 29 16:53:23 crc kubenswrapper[4592]: I0929 16:53:23.183279 4592 scope.go:117] "RemoveContainer" containerID="635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be"
Sep 29 16:53:23 crc kubenswrapper[4592]: I0929 16:53:23.833062 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbbtb_2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89/kube-multus/1.log"
Sep 29 16:53:23 crc kubenswrapper[4592]: I0929 16:53:23.833564 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbbtb" event={"ID":"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89","Type":"ContainerStarted","Data":"1ae5ae3cccc0d89a1a3c86a0ae3425f225ca17e767eff18c0e270245b182897a"}
Sep 29 16:53:24 crc kubenswrapper[4592]: I0929 16:53:24.182112 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:53:24 crc kubenswrapper[4592]: I0929 16:53:24.182182 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:53:24 crc kubenswrapper[4592]: I0929 16:53:24.182249 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:53:24 crc kubenswrapper[4592]: E0929 16:53:24.182365 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:53:24 crc kubenswrapper[4592]: E0929 16:53:24.182582 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:53:24 crc kubenswrapper[4592]: E0929 16:53:24.182622 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:53:25 crc kubenswrapper[4592]: I0929 16:53:25.182278 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:53:25 crc kubenswrapper[4592]: E0929 16:53:25.182417 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-qvsjc" podUID="484e63f2-7bae-4e57-ab79-95cba3bad285"
Sep 29 16:53:26 crc kubenswrapper[4592]: I0929 16:53:26.182571 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:53:26 crc kubenswrapper[4592]: I0929 16:53:26.182632 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:53:26 crc kubenswrapper[4592]: E0929 16:53:26.182735 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Sep 29 16:53:26 crc kubenswrapper[4592]: I0929 16:53:26.182571 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:53:26 crc kubenswrapper[4592]: E0929 16:53:26.182838 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Sep 29 16:53:26 crc kubenswrapper[4592]: E0929 16:53:26.182905 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Sep 29 16:53:27 crc kubenswrapper[4592]: I0929 16:53:27.182026 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc"
Sep 29 16:53:27 crc kubenswrapper[4592]: I0929 16:53:27.185579 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Sep 29 16:53:27 crc kubenswrapper[4592]: I0929 16:53:27.185870 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Sep 29 16:53:28 crc kubenswrapper[4592]: I0929 16:53:28.182991 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:53:28 crc kubenswrapper[4592]: I0929 16:53:28.183051 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:53:28 crc kubenswrapper[4592]: I0929 16:53:28.183017 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:53:28 crc kubenswrapper[4592]: I0929 16:53:28.185667 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Sep 29 16:53:28 crc kubenswrapper[4592]: I0929 16:53:28.186627 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Sep 29 16:53:28 crc kubenswrapper[4592]: I0929 16:53:28.187332 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Sep 29 16:53:28 crc kubenswrapper[4592]: I0929 16:53:28.187837 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.760548 4592 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.797800 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5l86w"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.798483 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.800020 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.800309 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.801889 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-8pdqk"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.802433 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-8pdqk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.804819 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.805613 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-smrv8"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.805728 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.806281 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.806351 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.806718 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.807340 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.807759 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.819676 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.819907 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.820102 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Sep 29 16:53:30 crc kubenswrapper[4592]: W0929 16:53:30.820504 4592 reflector.go:561] object-"openshift-oauth-apiserver"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object
Sep 29 16:53:30 crc kubenswrapper[4592]: E0929 16:53:30.820538 4592 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Sep 29 16:53:30 crc kubenswrapper[4592]: W0929 16:53:30.820606 4592 reflector.go:561] object-"openshift-oauth-apiserver"/"encryption-config-1": failed to list *v1.Secret: secrets "encryption-config-1" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object
Sep 29 16:53:30 crc kubenswrapper[4592]: E0929 16:53:30.820621 4592 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"encryption-config-1\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"encryption-config-1\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Sep 29 16:53:30 crc kubenswrapper[4592]: W0929 16:53:30.820662 4592 reflector.go:561] object-"openshift-oauth-apiserver"/"etcd-client": failed to list *v1.Secret: secrets "etcd-client" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object
Sep 29 16:53:30 crc kubenswrapper[4592]: E0929 16:53:30.820674 4592 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"etcd-client\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"etcd-client\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Sep 29 16:53:30 crc kubenswrapper[4592]: W0929 16:53:30.820658 4592 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv": failed to list *v1.Secret: secrets "openshift-apiserver-operator-dockercfg-xtcjv" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object
Sep 29 16:53:30 crc kubenswrapper[4592]: E0929 16:53:30.820694 4592 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-dockercfg-xtcjv\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-operator-dockercfg-xtcjv\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Sep 29 16:53:30 crc kubenswrapper[4592]: W0929 16:53:30.820764 4592 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config": failed to list *v1.ConfigMap: configmaps "openshift-apiserver-operator-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object
Sep 29 16:53:30 crc kubenswrapper[4592]: E0929 16:53:30.820779 4592 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-apiserver-operator-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Sep 29 16:53:30 crc kubenswrapper[4592]: W0929 16:53:30.820852 4592 reflector.go:561] object-"openshift-oauth-apiserver"/"etcd-serving-ca": failed to list *v1.ConfigMap: configmaps "etcd-serving-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object
Sep 29 16:53:30 crc kubenswrapper[4592]: E0929 16:53:30.820913 4592 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"etcd-serving-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"etcd-serving-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.821322 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Sep 29 16:53:30 crc kubenswrapper[4592]: W0929 16:53:30.821473 4592 reflector.go:561] object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert": failed to list *v1.Secret: secrets "openshift-apiserver-operator-serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-apiserver-operator": no relationship found between node 'crc' and this object
Sep 29 16:53:30 crc kubenswrapper[4592]: E0929 16:53:30.821498 4592 reflector.go:158] "Unhandled Error" err="object-\"openshift-apiserver-operator\"/\"openshift-apiserver-operator-serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-apiserver-operator-serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-apiserver-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Sep 29 16:53:30 crc kubenswrapper[4592]: W0929 16:53:30.821331 4592 reflector.go:561] object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq": failed to list *v1.Secret: secrets "oauth-apiserver-sa-dockercfg-6r2bq" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object
Sep 29 16:53:30 crc kubenswrapper[4592]: E0929 16:53:30.821554 4592 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"oauth-apiserver-sa-dockercfg-6r2bq\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"oauth-apiserver-sa-dockercfg-6r2bq\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.822073 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-49g4p"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.822225 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.822254 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.822391 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.822426 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.822527 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.822579 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-49g4p"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.822582 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.822614 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.822621 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.822920 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.823433 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.823447 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc"
Sep 29 16:53:30 crc kubenswrapper[4592]: W0929 16:53:30.824427 4592 reflector.go:561] object-"openshift-oauth-apiserver"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object
Sep 29 16:53:30 crc kubenswrapper[4592]: E0929 16:53:30.824546 4592 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.836551 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.848115 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.849451 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.849984 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.853452 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.853673 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.853849 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.854448 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.854606 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.854630 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.854731 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.854904 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.854981 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.855053 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.855123 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.855410 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.855519 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.855617 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.855742 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.855779 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856221 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856312 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856314 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-audit-dir\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856421 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bb9789af-7be4-40cf-a9da-df45fa8522f7-client-ca\") pod \"route-controller-manager-6576b87f9c-g478g\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856446 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qbzp\" (UniqueName: \"kubernetes.io/projected/bb9789af-7be4-40cf-a9da-df45fa8522f7-kube-api-access-7qbzp\") pod \"route-controller-manager-6576b87f9c-g478g\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856466 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d7df253-52f7-4764-8a61-fb4e2a389634-serving-cert\") pod \"openshift-config-operator-7777fb866f-8qfjc\" (UID: \"6d7df253-52f7-4764-8a61-fb4e2a389634\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856481 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7xg4\" (UniqueName: \"kubernetes.io/projected/3aac1447-7fa7-4b9c-bc79-e194dba65129-kube-api-access-w7xg4\") pod \"console-operator-58897d9998-8pdqk\" (UID: \"3aac1447-7fa7-4b9c-bc79-e194dba65129\") " pod="openshift-console-operator/console-operator-58897d9998-8pdqk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856495 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856525 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856542 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3aac1447-7fa7-4b9c-bc79-e194dba65129-config\") pod \"console-operator-58897d9998-8pdqk\" (UID: \"3aac1447-7fa7-4b9c-bc79-e194dba65129\") " pod="openshift-console-operator/console-operator-58897d9998-8pdqk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856572 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p657f\" (UniqueName: \"kubernetes.io/projected/e4394fc0-2772-479f-84e4-bbdb7d3b493a-kube-api-access-p657f\") pod \"downloads-7954f5f757-49g4p\" (UID: \"e4394fc0-2772-479f-84e4-bbdb7d3b493a\") " pod="openshift-console/downloads-7954f5f757-49g4p"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856589 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856604 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856619 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a89d7bfa-d740-4792-8fef-d71c8da7559e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-789qk\" (UID: \"a89d7bfa-d740-4792-8fef-d71c8da7559e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856633 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-serving-cert\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856639 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856646 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/92eb6f89-2332-47d7-a04c-19e63442c882-images\") pod \"machine-api-operator-5694c8668f-smrv8\" (UID: \"92eb6f89-2332-47d7-a04c-19e63442c882\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856661 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tffmm\" (UniqueName: \"kubernetes.io/projected/92eb6f89-2332-47d7-a04c-19e63442c882-kube-api-access-tffmm\") pod \"machine-api-operator-5694c8668f-smrv8\" (UID: \"92eb6f89-2332-47d7-a04c-19e63442c882\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856688 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5h259\" (UniqueName: \"kubernetes.io/projected/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-kube-api-access-5h259\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856263 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856703 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-audit-policies\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856722 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/92eb6f89-2332-47d7-a04c-19e63442c882-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-smrv8\" (UID: \"92eb6f89-2332-47d7-a04c-19e63442c882\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856736 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856755 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/6d7df253-52f7-4764-8a61-fb4e2a389634-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8qfjc\" (UID: \"6d7df253-52f7-4764-8a61-fb4e2a389634\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856798 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-audit-policies\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856823 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856843 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jz6w\" (UniqueName: \"kubernetes.io/projected/6d7df253-52f7-4764-8a61-fb4e2a389634-kube-api-access-5jz6w\") pod \"openshift-config-operator-7777fb866f-8qfjc\" (UID: \"6d7df253-52f7-4764-8a61-fb4e2a389634\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856856 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb9789af-7be4-40cf-a9da-df45fa8522f7-config\") pod \"route-controller-manager-6576b87f9c-g478g\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856870 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3aac1447-7fa7-4b9c-bc79-e194dba65129-serving-cert\") pod \"console-operator-58897d9998-8pdqk\" (UID: \"3aac1447-7fa7-4b9c-bc79-e194dba65129\") " pod="openshift-console-operator/console-operator-58897d9998-8pdqk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856921 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856939 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-encryption-config\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856956 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856973 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92eb6f89-2332-47d7-a04c-19e63442c882-config\") pod \"machine-api-operator-5694c8668f-smrv8\" (UID: \"92eb6f89-2332-47d7-a04c-19e63442c882\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.856987 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.857004 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb9789af-7be4-40cf-a9da-df45fa8522f7-serving-cert\") pod \"route-controller-manager-6576b87f9c-g478g\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.857018 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.857035 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.857061 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3aac1447-7fa7-4b9c-bc79-e194dba65129-trusted-ca\") pod \"console-operator-58897d9998-8pdqk\" (UID: \"3aac1447-7fa7-4b9c-bc79-e194dba65129\") " pod="openshift-console-operator/console-operator-58897d9998-8pdqk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.857076 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.857092 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctztn\" (UniqueName: \"kubernetes.io/projected/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-kube-api-access-ctztn\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.857117 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a89d7bfa-d740-4792-8fef-d71c8da7559e-config\") pod \"openshift-apiserver-operator-796bbdcf4f-789qk\" (UID: \"a89d7bfa-d740-4792-8fef-d71c8da7559e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.857132 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2mv5\" (UniqueName: \"kubernetes.io/projected/a89d7bfa-d740-4792-8fef-d71c8da7559e-kube-api-access-k2mv5\") pod \"openshift-apiserver-operator-796bbdcf4f-789qk\" (UID: \"a89d7bfa-d740-4792-8fef-d71c8da7559e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.857163 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-audit-dir\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.857179 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-etcd-client\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.857200 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.857208 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.861262 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.861307 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.861439 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.861594 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.861791 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.862431 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.863039 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.863276 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.863444 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.864647 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qtlgd"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.865280 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-zn6hr"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.865549 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-68w2v"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.865589 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.865563 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-zn6hr"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.866889 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.867281 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.871472 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w748k"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.871907 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.873873 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.874733 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.874770 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.875085 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g2gnz"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.875520 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.875783 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.875977 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.876199 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-8q6sl"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.876702 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.877113 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8gn4w"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.877773 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8gn4w"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.879605 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.896475 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-t4tpk"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.897067 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-t4tpk"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.897658 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-48whw"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.898348 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.899674 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.900086 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.914696 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.915129 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.915306 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.915408 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.917525 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.917725 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.917874 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.918964 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.921213 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.922498 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.927576 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.941991 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.942288 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.943100 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.944954 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-46ndj"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.945689 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-46ndj"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.945965 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.946606 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.946716 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.946910 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.947079 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.947244 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.947297 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.947344 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.947453 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.947609 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.947632 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.948263 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.947662 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.948444 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.949266 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb"]
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.949615 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.957856 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958197 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958599 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/57dcd71e-9bef-47f5-8512-d5eb7cd407b5-machine-approver-tls\") pod \"machine-approver-56656f9798-ggwzd\" (UID: \"57dcd71e-9bef-47f5-8512-d5eb7cd407b5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958627 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57dcd71e-9bef-47f5-8512-d5eb7cd407b5-config\") pod \"machine-approver-56656f9798-ggwzd\" (UID: \"57dcd71e-9bef-47f5-8512-d5eb7cd407b5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958643 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e031dd8a-d542-4dca-8bb7-12e36101c41e-serving-cert\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v"
Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958667 4592 reconciler_common.go:218]
"operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958684 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-encryption-config\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958700 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/107b2d55-7d06-4091-b57c-bcf7c3635060-serving-cert\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958717 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958733 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5dhv\" (UniqueName: \"kubernetes.io/projected/ff85be0b-4fe9-43fa-941f-c00f69b7f459-kube-api-access-c5dhv\") pod \"control-plane-machine-set-operator-78cbb6b69f-m4z52\" (UID: \"ff85be0b-4fe9-43fa-941f-c00f69b7f459\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958749 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92eb6f89-2332-47d7-a04c-19e63442c882-config\") pod \"machine-api-operator-5694c8668f-smrv8\" (UID: \"92eb6f89-2332-47d7-a04c-19e63442c882\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958772 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958835 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb9789af-7be4-40cf-a9da-df45fa8522f7-serving-cert\") pod \"route-controller-manager-6576b87f9c-g478g\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958862 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958883 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hswdp\" (UniqueName: \"kubernetes.io/projected/81763dda-c34c-4bdf-a422-bbb5a76d8c95-kube-api-access-hswdp\") pod \"dns-operator-744455d44c-8gn4w\" (UID: \"81763dda-c34c-4bdf-a422-bbb5a76d8c95\") " pod="openshift-dns-operator/dns-operator-744455d44c-8gn4w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958898 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d645a2f8-5d71-4d9e-9bfa-487388f618ca-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9dbw7\" (UID: \"d645a2f8-5d71-4d9e-9bfa-487388f618ca\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958912 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d645a2f8-5d71-4d9e-9bfa-487388f618ca-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9dbw7\" (UID: \"d645a2f8-5d71-4d9e-9bfa-487388f618ca\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958930 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958945 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-image-import-ca\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958963 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b20af08-54c7-4d8b-b2c2-6189a31c76e5-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-vjq8k\" (UID: \"7b20af08-54c7-4d8b-b2c2-6189a31c76e5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.958985 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3650f5fa-7a17-4b65-8b55-5bb528beba58-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mczlm\" (UID: \"3650f5fa-7a17-4b65-8b55-5bb528beba58\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 
16:53:30.959001 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx2kn\" (UniqueName: \"kubernetes.io/projected/3650f5fa-7a17-4b65-8b55-5bb528beba58-kube-api-access-hx2kn\") pod \"cluster-samples-operator-665b6dd947-mczlm\" (UID: \"3650f5fa-7a17-4b65-8b55-5bb528beba58\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959017 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3aac1447-7fa7-4b9c-bc79-e194dba65129-trusted-ca\") pod \"console-operator-58897d9998-8pdqk\" (UID: \"3aac1447-7fa7-4b9c-bc79-e194dba65129\") " pod="openshift-console-operator/console-operator-58897d9998-8pdqk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959032 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959048 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-etcd-serving-ca\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959064 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctztn\" (UniqueName: \"kubernetes.io/projected/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-kube-api-access-ctztn\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959080 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/440d74af-4040-42bf-83cd-e13fb8526d17-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-vvfmq\" (UID: \"440d74af-4040-42bf-83cd-e13fb8526d17\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959099 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/440d74af-4040-42bf-83cd-e13fb8526d17-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-vvfmq\" (UID: \"440d74af-4040-42bf-83cd-e13fb8526d17\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959163 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a89d7bfa-d740-4792-8fef-d71c8da7559e-config\") pod \"openshift-apiserver-operator-796bbdcf4f-789qk\" (UID: \"a89d7bfa-d740-4792-8fef-d71c8da7559e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959187 4592 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2mv5\" (UniqueName: \"kubernetes.io/projected/a89d7bfa-d740-4792-8fef-d71c8da7559e-kube-api-access-k2mv5\") pod \"openshift-apiserver-operator-796bbdcf4f-789qk\" (UID: \"a89d7bfa-d740-4792-8fef-d71c8da7559e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959207 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-audit-dir\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959230 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/440d74af-4040-42bf-83cd-e13fb8526d17-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-vvfmq\" (UID: \"440d74af-4040-42bf-83cd-e13fb8526d17\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959252 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zrpz\" (UniqueName: \"kubernetes.io/projected/107b2d55-7d06-4091-b57c-bcf7c3635060-kube-api-access-8zrpz\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959273 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-serving-cert\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959292 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b3554952-1f35-4ce9-9a10-1caa25c188fb-metrics-certs\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959311 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5nlk\" (UniqueName: \"kubernetes.io/projected/b3554952-1f35-4ce9-9a10-1caa25c188fb-kube-api-access-f5nlk\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959328 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e031dd8a-d542-4dca-8bb7-12e36101c41e-config\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959350 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-etcd-client\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959370 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/81763dda-c34c-4bdf-a422-bbb5a76d8c95-metrics-tls\") pod \"dns-operator-744455d44c-8gn4w\" (UID: \"81763dda-c34c-4bdf-a422-bbb5a76d8c95\") " pod="openshift-dns-operator/dns-operator-744455d44c-8gn4w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959394 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b20af08-54c7-4d8b-b2c2-6189a31c76e5-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-vjq8k\" (UID: \"7b20af08-54c7-4d8b-b2c2-6189a31c76e5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959416 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959436 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-config\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959458 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5a608b5e-6d79-4439-adcf-7f2549890bba-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-48whw\" (UID: \"5a608b5e-6d79-4439-adcf-7f2549890bba\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959484 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bb9789af-7be4-40cf-a9da-df45fa8522f7-client-ca\") pod \"route-controller-manager-6576b87f9c-g478g\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959506 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qbzp\" (UniqueName: \"kubernetes.io/projected/bb9789af-7be4-40cf-a9da-df45fa8522f7-kube-api-access-7qbzp\") pod \"route-controller-manager-6576b87f9c-g478g\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959527 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-audit-dir\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959547 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdcpp\" (UniqueName: \"kubernetes.io/projected/440d74af-4040-42bf-83cd-e13fb8526d17-kube-api-access-jdcpp\") pod \"cluster-image-registry-operator-dc59b4c8b-vvfmq\" (UID: \"440d74af-4040-42bf-83cd-e13fb8526d17\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959570 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d7df253-52f7-4764-8a61-fb4e2a389634-serving-cert\") pod \"openshift-config-operator-7777fb866f-8qfjc\" (UID: \"6d7df253-52f7-4764-8a61-fb4e2a389634\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959595 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7xg4\" (UniqueName: \"kubernetes.io/projected/3aac1447-7fa7-4b9c-bc79-e194dba65129-kube-api-access-w7xg4\") pod \"console-operator-58897d9998-8pdqk\" (UID: \"3aac1447-7fa7-4b9c-bc79-e194dba65129\") " pod="openshift-console-operator/console-operator-58897d9998-8pdqk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959617 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959640 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lvzxc\" (UID: \"c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959663 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-config\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959684 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-audit\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959705 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr624\" (UniqueName: \"kubernetes.io/projected/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-kube-api-access-fr624\") pod 
\"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959725 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3554952-1f35-4ce9-9a10-1caa25c188fb-service-ca-bundle\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959757 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959781 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ff85be0b-4fe9-43fa-941f-c00f69b7f459-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-m4z52\" (UID: \"ff85be0b-4fe9-43fa-941f-c00f69b7f459\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959807 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3aac1447-7fa7-4b9c-bc79-e194dba65129-config\") pod \"console-operator-58897d9998-8pdqk\" (UID: \"3aac1447-7fa7-4b9c-bc79-e194dba65129\") " pod="openshift-console-operator/console-operator-58897d9998-8pdqk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959828 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxhp6\" (UniqueName: \"kubernetes.io/projected/7b20af08-54c7-4d8b-b2c2-6189a31c76e5-kube-api-access-rxhp6\") pod \"openshift-controller-manager-operator-756b6f6bc6-vjq8k\" (UID: \"7b20af08-54c7-4d8b-b2c2-6189a31c76e5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959852 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e031dd8a-d542-4dca-8bb7-12e36101c41e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959882 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-encryption-config\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959902 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfhj9\" (UniqueName: 
\"kubernetes.io/projected/57dcd71e-9bef-47f5-8512-d5eb7cd407b5-kube-api-access-bfhj9\") pod \"machine-approver-56656f9798-ggwzd\" (UID: \"57dcd71e-9bef-47f5-8512-d5eb7cd407b5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959923 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5a608b5e-6d79-4439-adcf-7f2549890bba-proxy-tls\") pod \"machine-config-controller-84d6567774-48whw\" (UID: \"5a608b5e-6d79-4439-adcf-7f2549890bba\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959945 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p657f\" (UniqueName: \"kubernetes.io/projected/e4394fc0-2772-479f-84e4-bbdb7d3b493a-kube-api-access-p657f\") pod \"downloads-7954f5f757-49g4p\" (UID: \"e4394fc0-2772-479f-84e4-bbdb7d3b493a\") " pod="openshift-console/downloads-7954f5f757-49g4p" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959965 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.959986 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960006 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-client-ca\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960024 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-audit-dir\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960044 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjq6d\" (UniqueName: \"kubernetes.io/projected/e031dd8a-d542-4dca-8bb7-12e36101c41e-kube-api-access-sjq6d\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960075 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a89d7bfa-d740-4792-8fef-d71c8da7559e-serving-cert\") pod 
\"openshift-apiserver-operator-796bbdcf4f-789qk\" (UID: \"a89d7bfa-d740-4792-8fef-d71c8da7559e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960098 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-serving-cert\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960119 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b3554952-1f35-4ce9-9a10-1caa25c188fb-default-certificate\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960172 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/92eb6f89-2332-47d7-a04c-19e63442c882-images\") pod \"machine-api-operator-5694c8668f-smrv8\" (UID: \"92eb6f89-2332-47d7-a04c-19e63442c882\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960195 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tffmm\" (UniqueName: \"kubernetes.io/projected/92eb6f89-2332-47d7-a04c-19e63442c882-kube-api-access-tffmm\") pod \"machine-api-operator-5694c8668f-smrv8\" (UID: \"92eb6f89-2332-47d7-a04c-19e63442c882\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960217 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5h259\" (UniqueName: \"kubernetes.io/projected/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-kube-api-access-5h259\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960243 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9-config\") pod \"kube-controller-manager-operator-78b949d7b-lvzxc\" (UID: \"c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960263 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d645a2f8-5d71-4d9e-9bfa-487388f618ca-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9dbw7\" (UID: \"d645a2f8-5d71-4d9e-9bfa-487388f618ca\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960287 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/92eb6f89-2332-47d7-a04c-19e63442c882-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-smrv8\" (UID: 
\"92eb6f89-2332-47d7-a04c-19e63442c882\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960310 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960331 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-audit-policies\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960353 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lvzxc\" (UID: \"c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960376 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/6d7df253-52f7-4764-8a61-fb4e2a389634-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8qfjc\" (UID: \"6d7df253-52f7-4764-8a61-fb4e2a389634\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960421 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-etcd-client\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960445 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvf9q\" (UniqueName: \"kubernetes.io/projected/5a608b5e-6d79-4439-adcf-7f2549890bba-kube-api-access-mvf9q\") pod \"machine-config-controller-84d6567774-48whw\" (UID: \"5a608b5e-6d79-4439-adcf-7f2549890bba\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960473 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jz6w\" (UniqueName: \"kubernetes.io/projected/6d7df253-52f7-4764-8a61-fb4e2a389634-kube-api-access-5jz6w\") pod \"openshift-config-operator-7777fb866f-8qfjc\" (UID: \"6d7df253-52f7-4764-8a61-fb4e2a389634\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960489 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb9789af-7be4-40cf-a9da-df45fa8522f7-config\") pod \"route-controller-manager-6576b87f9c-g478g\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960504 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-audit-policies\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960521 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960537 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-node-pullsecrets\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960552 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/57dcd71e-9bef-47f5-8512-d5eb7cd407b5-auth-proxy-config\") pod \"machine-approver-56656f9798-ggwzd\" (UID: \"57dcd71e-9bef-47f5-8512-d5eb7cd407b5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960567 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b3554952-1f35-4ce9-9a10-1caa25c188fb-stats-auth\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960584 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3aac1447-7fa7-4b9c-bc79-e194dba65129-serving-cert\") pod \"console-operator-58897d9998-8pdqk\" (UID: \"3aac1447-7fa7-4b9c-bc79-e194dba65129\") " pod="openshift-console-operator/console-operator-58897d9998-8pdqk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960598 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960619 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.960639 4592 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e031dd8a-d542-4dca-8bb7-12e36101c41e-service-ca-bundle\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.962014 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92eb6f89-2332-47d7-a04c-19e63442c882-config\") pod \"machine-api-operator-5694c8668f-smrv8\" (UID: \"92eb6f89-2332-47d7-a04c-19e63442c882\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.963472 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.963516 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.963925 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.964364 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bb9789af-7be4-40cf-a9da-df45fa8522f7-client-ca\") pod \"route-controller-manager-6576b87f9c-g478g\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.965547 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3aac1447-7fa7-4b9c-bc79-e194dba65129-config\") pod \"console-operator-58897d9998-8pdqk\" (UID: \"3aac1447-7fa7-4b9c-bc79-e194dba65129\") " pod="openshift-console-operator/console-operator-58897d9998-8pdqk" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.966221 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.966377 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-audit-dir\") pod 
\"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:30 crc kubenswrapper[4592]: I0929 16:53:30.966775 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/92eb6f89-2332-47d7-a04c-19e63442c882-images\") pod \"machine-api-operator-5694c8668f-smrv8\" (UID: \"92eb6f89-2332-47d7-a04c-19e63442c882\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.977102 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3aac1447-7fa7-4b9c-bc79-e194dba65129-trusted-ca\") pod \"console-operator-58897d9998-8pdqk\" (UID: \"3aac1447-7fa7-4b9c-bc79-e194dba65129\") " pod="openshift-console-operator/console-operator-58897d9998-8pdqk" Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.985348 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/92eb6f89-2332-47d7-a04c-19e63442c882-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-smrv8\" (UID: \"92eb6f89-2332-47d7-a04c-19e63442c882\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.985599 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.986642 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.989740 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hwj5f"] Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.998891 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-d4pxx"] Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.999502 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d4pxx" Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.986712 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.994846 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb9789af-7be4-40cf-a9da-df45fa8522f7-config\") pod \"route-controller-manager-6576b87f9c-g478g\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.999889 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.995406 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-audit-policies\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.996959 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/6d7df253-52f7-4764-8a61-fb4e2a389634-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8qfjc\" (UID: \"6d7df253-52f7-4764-8a61-fb4e2a389634\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.997172 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-audit-dir\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.986764 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.986894 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.986920 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.986990 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.987031 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.987077 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.987413 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.987716 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.989449 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.989693 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.989726 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.989806 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.989838 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.989866 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.009102 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.009706 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:30.989931 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.010673 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.010743 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.011285 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.011439 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-d78kk"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.011318 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.011355 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.012815 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-audit-policies\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.013104 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.016224 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.024208 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.024949 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.027196 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.027698 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3aac1447-7fa7-4b9c-bc79-e194dba65129-serving-cert\") pod \"console-operator-58897d9998-8pdqk\" (UID: \"3aac1447-7fa7-4b9c-bc79-e194dba65129\") " pod="openshift-console-operator/console-operator-58897d9998-8pdqk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.027926 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d7df253-52f7-4764-8a61-fb4e2a389634-serving-cert\") pod \"openshift-config-operator-7777fb866f-8qfjc\" (UID: \"6d7df253-52f7-4764-8a61-fb4e2a389634\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.028691 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.029238 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb9789af-7be4-40cf-a9da-df45fa8522f7-serving-cert\") pod \"route-controller-manager-6576b87f9c-g478g\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.029547 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.032555 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.033529 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.037132 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-tsm97"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.037679 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-tsm97"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.038715 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.039284 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.042803 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.043276 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.043592 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.043707 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.043914 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.046764 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.047397 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.047520 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.048001 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.048038 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.061504 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/107b2d55-7d06-4091-b57c-bcf7c3635060-serving-cert\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.061657 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5dhv\" (UniqueName: \"kubernetes.io/projected/ff85be0b-4fe9-43fa-941f-c00f69b7f459-kube-api-access-c5dhv\") pod \"control-plane-machine-set-operator-78cbb6b69f-m4z52\" (UID: \"ff85be0b-4fe9-43fa-941f-c00f69b7f459\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.061747 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hswdp\" (UniqueName: \"kubernetes.io/projected/81763dda-c34c-4bdf-a422-bbb5a76d8c95-kube-api-access-hswdp\") pod \"dns-operator-744455d44c-8gn4w\" (UID: \"81763dda-c34c-4bdf-a422-bbb5a76d8c95\") " pod="openshift-dns-operator/dns-operator-744455d44c-8gn4w"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.061828 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d645a2f8-5d71-4d9e-9bfa-487388f618ca-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9dbw7\" (UID: \"d645a2f8-5d71-4d9e-9bfa-487388f618ca\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.061899 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d645a2f8-5d71-4d9e-9bfa-487388f618ca-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9dbw7\" (UID: \"d645a2f8-5d71-4d9e-9bfa-487388f618ca\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.061972 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-image-import-ca\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062042 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b20af08-54c7-4d8b-b2c2-6189a31c76e5-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-vjq8k\" (UID: \"7b20af08-54c7-4d8b-b2c2-6189a31c76e5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062120 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3650f5fa-7a17-4b65-8b55-5bb528beba58-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mczlm\" (UID: \"3650f5fa-7a17-4b65-8b55-5bb528beba58\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062217 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx2kn\" (UniqueName: \"kubernetes.io/projected/3650f5fa-7a17-4b65-8b55-5bb528beba58-kube-api-access-hx2kn\") pod \"cluster-samples-operator-665b6dd947-mczlm\" (UID: \"3650f5fa-7a17-4b65-8b55-5bb528beba58\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062290 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-etcd-serving-ca\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062363 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/440d74af-4040-42bf-83cd-e13fb8526d17-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-vvfmq\" (UID: \"440d74af-4040-42bf-83cd-e13fb8526d17\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062436 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/440d74af-4040-42bf-83cd-e13fb8526d17-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-vvfmq\" (UID: \"440d74af-4040-42bf-83cd-e13fb8526d17\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062509 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/440d74af-4040-42bf-83cd-e13fb8526d17-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-vvfmq\" (UID: \"440d74af-4040-42bf-83cd-e13fb8526d17\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062579 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zrpz\" (UniqueName: \"kubernetes.io/projected/107b2d55-7d06-4091-b57c-bcf7c3635060-kube-api-access-8zrpz\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062648 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-serving-cert\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062739 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b3554952-1f35-4ce9-9a10-1caa25c188fb-metrics-certs\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062811 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5nlk\" (UniqueName: \"kubernetes.io/projected/b3554952-1f35-4ce9-9a10-1caa25c188fb-kube-api-access-f5nlk\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062886 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e031dd8a-d542-4dca-8bb7-12e36101c41e-config\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.062954 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/81763dda-c34c-4bdf-a422-bbb5a76d8c95-metrics-tls\") pod \"dns-operator-744455d44c-8gn4w\" (UID: \"81763dda-c34c-4bdf-a422-bbb5a76d8c95\") " pod="openshift-dns-operator/dns-operator-744455d44c-8gn4w"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063057 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b20af08-54c7-4d8b-b2c2-6189a31c76e5-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-vjq8k\" (UID: \"7b20af08-54c7-4d8b-b2c2-6189a31c76e5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063167 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-config\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063276 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5a608b5e-6d79-4439-adcf-7f2549890bba-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-48whw\" (UID: \"5a608b5e-6d79-4439-adcf-7f2549890bba\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063374 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdcpp\" (UniqueName: \"kubernetes.io/projected/440d74af-4040-42bf-83cd-e13fb8526d17-kube-api-access-jdcpp\") pod \"cluster-image-registry-operator-dc59b4c8b-vvfmq\" (UID: \"440d74af-4040-42bf-83cd-e13fb8526d17\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063447 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-audit\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063518 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr624\" (UniqueName: \"kubernetes.io/projected/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-kube-api-access-fr624\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063588 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3554952-1f35-4ce9-9a10-1caa25c188fb-service-ca-bundle\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063666 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lvzxc\" (UID: \"c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063736 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-config\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063811 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ff85be0b-4fe9-43fa-941f-c00f69b7f459-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-m4z52\" (UID: \"ff85be0b-4fe9-43fa-941f-c00f69b7f459\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063887 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxhp6\" (UniqueName: \"kubernetes.io/projected/7b20af08-54c7-4d8b-b2c2-6189a31c76e5-kube-api-access-rxhp6\") pod \"openshift-controller-manager-operator-756b6f6bc6-vjq8k\" (UID: \"7b20af08-54c7-4d8b-b2c2-6189a31c76e5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063952 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e031dd8a-d542-4dca-8bb7-12e36101c41e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.064032 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-encryption-config\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.064102 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfhj9\" (UniqueName: \"kubernetes.io/projected/57dcd71e-9bef-47f5-8512-d5eb7cd407b5-kube-api-access-bfhj9\") pod \"machine-approver-56656f9798-ggwzd\" (UID: \"57dcd71e-9bef-47f5-8512-d5eb7cd407b5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.064188 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5a608b5e-6d79-4439-adcf-7f2549890bba-proxy-tls\") pod \"machine-config-controller-84d6567774-48whw\" (UID: \"5a608b5e-6d79-4439-adcf-7f2549890bba\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.064269 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-client-ca\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.064336 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-audit-dir\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.064409 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjq6d\" (UniqueName: \"kubernetes.io/projected/e031dd8a-d542-4dca-8bb7-12e36101c41e-kube-api-access-sjq6d\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.064498 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b3554952-1f35-4ce9-9a10-1caa25c188fb-default-certificate\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.064608 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9-config\") pod \"kube-controller-manager-operator-78b949d7b-lvzxc\" (UID: \"c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.064680 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d645a2f8-5d71-4d9e-9bfa-487388f618ca-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9dbw7\" (UID: \"d645a2f8-5d71-4d9e-9bfa-487388f618ca\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.064758 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lvzxc\" (UID: \"c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.064832 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-etcd-client\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.065136 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvf9q\" (UniqueName: \"kubernetes.io/projected/5a608b5e-6d79-4439-adcf-7f2549890bba-kube-api-access-mvf9q\") pod \"machine-config-controller-84d6567774-48whw\" (UID: \"5a608b5e-6d79-4439-adcf-7f2549890bba\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.065274 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-node-pullsecrets\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.065342 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/57dcd71e-9bef-47f5-8512-d5eb7cd407b5-auth-proxy-config\") pod \"machine-approver-56656f9798-ggwzd\" (UID: \"57dcd71e-9bef-47f5-8512-d5eb7cd407b5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.065427 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b3554952-1f35-4ce9-9a10-1caa25c188fb-stats-auth\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.065493 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.065556 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.065623 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e031dd8a-d542-4dca-8bb7-12e36101c41e-service-ca-bundle\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.065691 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/57dcd71e-9bef-47f5-8512-d5eb7cd407b5-machine-approver-tls\") pod \"machine-approver-56656f9798-ggwzd\" (UID: \"57dcd71e-9bef-47f5-8512-d5eb7cd407b5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.065764 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e031dd8a-d542-4dca-8bb7-12e36101c41e-serving-cert\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.065834 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57dcd71e-9bef-47f5-8512-d5eb7cd407b5-config\") pod \"machine-approver-56656f9798-ggwzd\" (UID: \"57dcd71e-9bef-47f5-8512-d5eb7cd407b5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.066505 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57dcd71e-9bef-47f5-8512-d5eb7cd407b5-config\") pod \"machine-approver-56656f9798-ggwzd\" (UID: \"57dcd71e-9bef-47f5-8512-d5eb7cd407b5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.067561 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9-config\") pod \"kube-controller-manager-operator-78b949d7b-lvzxc\" (UID: \"c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.084738 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-config\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.084979 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-image-import-ca\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.094858 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/107b2d55-7d06-4091-b57c-bcf7c3635060-serving-cert\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.095295 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-lvzxc\" (UID: \"c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.097827 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-client-ca\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.097946 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-audit-dir\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.063280 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.116018 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-encryption-config\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.119655 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.119891 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.121682 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/57dcd71e-9bef-47f5-8512-d5eb7cd407b5-auth-proxy-config\") pod \"machine-approver-56656f9798-ggwzd\" (UID: \"57dcd71e-9bef-47f5-8512-d5eb7cd407b5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.122519 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.122703 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-config\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.123106 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.123643 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.123849 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-node-pullsecrets\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.123978 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e031dd8a-d542-4dca-8bb7-12e36101c41e-config\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.124647 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b20af08-54c7-4d8b-b2c2-6189a31c76e5-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-vjq8k\" (UID: \"7b20af08-54c7-4d8b-b2c2-6189a31c76e5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.125391 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5a608b5e-6d79-4439-adcf-7f2549890bba-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-48whw\" (UID: \"5a608b5e-6d79-4439-adcf-7f2549890bba\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.125585 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/57dcd71e-9bef-47f5-8512-d5eb7cd407b5-machine-approver-tls\") pod \"machine-approver-56656f9798-ggwzd\" (UID: \"57dcd71e-9bef-47f5-8512-d5eb7cd407b5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.126040 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b20af08-54c7-4d8b-b2c2-6189a31c76e5-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-vjq8k\" (UID: \"7b20af08-54c7-4d8b-b2c2-6189a31c76e5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.126348 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e031dd8a-d542-4dca-8bb7-12e36101c41e-service-ca-bundle\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.126472 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.126810 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-etcd-serving-ca\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.128366 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.128537 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/440d74af-4040-42bf-83cd-e13fb8526d17-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-vvfmq\" (UID: \"440d74af-4040-42bf-83cd-e13fb8526d17\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.128689 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-audit\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.129295 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-serving-cert\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.129488 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.129496 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3650f5fa-7a17-4b65-8b55-5bb528beba58-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mczlm\" (UID: \"3650f5fa-7a17-4b65-8b55-5bb528beba58\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.129756 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.129780 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-8pdqk"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.129791 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.129800 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5l86w"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.129809 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-zn6hr"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.129818 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-49g4p"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.129827 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-jfwcc"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.130129 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.130259 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.130330 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w748k"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.130346 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-48whw"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.130356 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g2gnz"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.130415 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-jfwcc"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.130695 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-etcd-client\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.131036 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/440d74af-4040-42bf-83cd-e13fb8526d17-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-vvfmq\" (UID: \"440d74af-4040-42bf-83cd-e13fb8526d17\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.132028 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e031dd8a-d542-4dca-8bb7-12e36101c41e-serving-cert\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.134093 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qtlgd"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.136668 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e031dd8a-d542-4dca-8bb7-12e36101c41e-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.136909 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.138843 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.143234 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.143268 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.144455 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.145791 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.148491 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.152002 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8gn4w"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.154391 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-46ndj"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.156185 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-smrv8"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.157609 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-68w2v"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.159975 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-d78kk"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.160087 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.161612 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.163958 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-8q6sl"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.165856 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-jfwcc"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.169216 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.172716 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.173997 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.174598 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-d4pxx"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.176671 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.176753 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.180651 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.180823 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.180893 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.182139 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.190673 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hwj5f"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.190711 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-tsm97"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.190725 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.190740 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-4sx67"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.192750 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-2nsh7"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.196032 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-4sx67"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.200423 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-4sx67"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.200466 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-2nsh7"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.200573 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-2nsh7"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.203321 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.210318 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.213306 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.215030 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-npwm9"]
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.216178 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-npwm9"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.221392 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.240562 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.260932 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.280840 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.287343 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/81763dda-c34c-4bdf-a422-bbb5a76d8c95-metrics-tls\") pod \"dns-operator-744455d44c-8gn4w\" (UID: \"81763dda-c34c-4bdf-a422-bbb5a76d8c95\") " pod="openshift-dns-operator/dns-operator-744455d44c-8gn4w"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.300574 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.320650 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.340523 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.351727 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/b3554952-1f35-4ce9-9a10-1caa25c188fb-default-certificate\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.360580 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.372318 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/b3554952-1f35-4ce9-9a10-1caa25c188fb-stats-auth\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.380241 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.392786 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b3554952-1f35-4ce9-9a10-1caa25c188fb-metrics-certs\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.403034 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.421374 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.427631 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b3554952-1f35-4ce9-9a10-1caa25c188fb-service-ca-bundle\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.441184 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.460354 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.470477 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5a608b5e-6d79-4439-adcf-7f2549890bba-proxy-tls\") pod \"machine-config-controller-84d6567774-48whw\" (UID: \"5a608b5e-6d79-4439-adcf-7f2549890bba\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.480897 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.500322 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.519691 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.527431 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d645a2f8-5d71-4d9e-9bfa-487388f618ca-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9dbw7\" (UID: \"d645a2f8-5d71-4d9e-9bfa-487388f618ca\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.539872 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.542450 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d645a2f8-5d71-4d9e-9bfa-487388f618ca-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9dbw7\" (UID: \"d645a2f8-5d71-4d9e-9bfa-487388f618ca\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.560624 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.581114 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.601445 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.608558 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/ff85be0b-4fe9-43fa-941f-c00f69b7f459-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-m4z52\" (UID: \"ff85be0b-4fe9-43fa-941f-c00f69b7f459\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.640705 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.660826 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.680343 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.700962 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.720570 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.740363 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.768459 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.781260 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.801110 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.820557 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.840342 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.860519 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.881080 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.901270 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.921399 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.941295 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
kubenswrapper[4592]: E0929 16:53:31.960978 4592 secret.go:188] Couldn't get secret openshift-oauth-apiserver/encryption-config-1: failed to sync secret cache: timed out waiting for the condition Sep 29 16:53:31 crc kubenswrapper[4592]: E0929 16:53:31.961053 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-encryption-config podName:34f42b2b-8a4b-41da-89cc-cd4da7edafe0 nodeName:}" failed. No retries permitted until 2025-09-29 16:53:32.461034017 +0000 UTC m=+142.608811688 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "encryption-config" (UniqueName: "kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-encryption-config") pod "apiserver-7bbb656c7d-pb7hk" (UID: "34f42b2b-8a4b-41da-89cc-cd4da7edafe0") : failed to sync secret cache: timed out waiting for the condition Sep 29 16:53:31 crc kubenswrapper[4592]: E0929 16:53:31.965316 4592 secret.go:188] Couldn't get secret openshift-oauth-apiserver/etcd-client: failed to sync secret cache: timed out waiting for the condition Sep 29 16:53:31 crc kubenswrapper[4592]: E0929 16:53:31.965366 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-etcd-client podName:34f42b2b-8a4b-41da-89cc-cd4da7edafe0 nodeName:}" failed. No retries permitted until 2025-09-29 16:53:32.465353556 +0000 UTC m=+142.613131237 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-client" (UniqueName: "kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-etcd-client") pod "apiserver-7bbb656c7d-pb7hk" (UID: "34f42b2b-8a4b-41da-89cc-cd4da7edafe0") : failed to sync secret cache: timed out waiting for the condition Sep 29 16:53:31 crc kubenswrapper[4592]: E0929 16:53:31.966447 4592 secret.go:188] Couldn't get secret openshift-oauth-apiserver/serving-cert: failed to sync secret cache: timed out waiting for the condition Sep 29 16:53:31 crc kubenswrapper[4592]: E0929 16:53:31.966491 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-serving-cert podName:34f42b2b-8a4b-41da-89cc-cd4da7edafe0 nodeName:}" failed. No retries permitted until 2025-09-29 16:53:32.466479099 +0000 UTC m=+142.614256780 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-serving-cert") pod "apiserver-7bbb656c7d-pb7hk" (UID: "34f42b2b-8a4b-41da-89cc-cd4da7edafe0") : failed to sync secret cache: timed out waiting for the condition Sep 29 16:53:31 crc kubenswrapper[4592]: E0929 16:53:31.966521 4592 configmap.go:193] Couldn't get configMap openshift-oauth-apiserver/etcd-serving-ca: failed to sync configmap cache: timed out waiting for the condition Sep 29 16:53:31 crc kubenswrapper[4592]: E0929 16:53:31.966581 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-etcd-serving-ca podName:34f42b2b-8a4b-41da-89cc-cd4da7edafe0 nodeName:}" failed. No retries permitted until 2025-09-29 16:53:32.466569112 +0000 UTC m=+142.614346793 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etcd-serving-ca" (UniqueName: "kubernetes.io/configmap/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-etcd-serving-ca") pod "apiserver-7bbb656c7d-pb7hk" (UID: "34f42b2b-8a4b-41da-89cc-cd4da7edafe0") : failed to sync configmap cache: timed out waiting for the condition Sep 29 16:53:31 crc kubenswrapper[4592]: E0929 16:53:31.966931 4592 secret.go:188] Couldn't get secret openshift-apiserver-operator/openshift-apiserver-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Sep 29 16:53:31 crc kubenswrapper[4592]: E0929 16:53:31.967248 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a89d7bfa-d740-4792-8fef-d71c8da7559e-serving-cert podName:a89d7bfa-d740-4792-8fef-d71c8da7559e nodeName:}" failed. No retries permitted until 2025-09-29 16:53:32.467227911 +0000 UTC m=+142.615005592 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/a89d7bfa-d740-4792-8fef-d71c8da7559e-serving-cert") pod "openshift-apiserver-operator-796bbdcf4f-789qk" (UID: "a89d7bfa-d740-4792-8fef-d71c8da7559e") : failed to sync secret cache: timed out waiting for the condition Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.978956 4592 request.go:700] Waited for 1.015283085s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift/token Sep 29 16:53:31 crc kubenswrapper[4592]: I0929 16:53:31.984774 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tffmm\" (UniqueName: \"kubernetes.io/projected/92eb6f89-2332-47d7-a04c-19e63442c882-kube-api-access-tffmm\") pod \"machine-api-operator-5694c8668f-smrv8\" (UID: \"92eb6f89-2332-47d7-a04c-19e63442c882\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" Sep 29 16:53:31 crc kubenswrapper[4592]: E0929 16:53:31.987476 4592 configmap.go:193] Couldn't get configMap openshift-apiserver-operator/openshift-apiserver-operator-config: failed to sync configmap cache: timed out waiting for the condition Sep 29 16:53:31 crc kubenswrapper[4592]: E0929 16:53:31.987587 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a89d7bfa-d740-4792-8fef-d71c8da7559e-config podName:a89d7bfa-d740-4792-8fef-d71c8da7559e nodeName:}" failed. No retries permitted until 2025-09-29 16:53:32.487564766 +0000 UTC m=+142.635342507 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/a89d7bfa-d740-4792-8fef-d71c8da7559e-config") pod "openshift-apiserver-operator-796bbdcf4f-789qk" (UID: "a89d7bfa-d740-4792-8fef-d71c8da7559e") : failed to sync configmap cache: timed out waiting for the condition Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.003220 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5h259\" (UniqueName: \"kubernetes.io/projected/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-kube-api-access-5h259\") pod \"oauth-openshift-558db77b4-5l86w\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.015054 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p657f\" (UniqueName: \"kubernetes.io/projected/e4394fc0-2772-479f-84e4-bbdb7d3b493a-kube-api-access-p657f\") pod \"downloads-7954f5f757-49g4p\" (UID: \"e4394fc0-2772-479f-84e4-bbdb7d3b493a\") " pod="openshift-console/downloads-7954f5f757-49g4p" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.016494 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.037456 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7xg4\" (UniqueName: \"kubernetes.io/projected/3aac1447-7fa7-4b9c-bc79-e194dba65129-kube-api-access-w7xg4\") pod \"console-operator-58897d9998-8pdqk\" (UID: \"3aac1447-7fa7-4b9c-bc79-e194dba65129\") " pod="openshift-console-operator/console-operator-58897d9998-8pdqk" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.050727 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-8pdqk" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.055893 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jz6w\" (UniqueName: \"kubernetes.io/projected/6d7df253-52f7-4764-8a61-fb4e2a389634-kube-api-access-5jz6w\") pod \"openshift-config-operator-7777fb866f-8qfjc\" (UID: \"6d7df253-52f7-4764-8a61-fb4e2a389634\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.077817 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qbzp\" (UniqueName: \"kubernetes.io/projected/bb9789af-7be4-40cf-a9da-df45fa8522f7-kube-api-access-7qbzp\") pod \"route-controller-manager-6576b87f9c-g478g\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.093786 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.123217 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.142346 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.157028 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-49g4p" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.160858 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.181933 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.200456 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.213411 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.221462 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.247781 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.258527 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-8pdqk"] Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.262244 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.275279 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5l86w"] Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.304106 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2mv5\" (UniqueName: \"kubernetes.io/projected/a89d7bfa-d740-4792-8fef-d71c8da7559e-kube-api-access-k2mv5\") pod \"openshift-apiserver-operator-796bbdcf4f-789qk\" (UID: \"a89d7bfa-d740-4792-8fef-d71c8da7559e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.305688 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.325537 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.330799 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.333821 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-smrv8"] Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.342598 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Sep 29 16:53:32 crc kubenswrapper[4592]: W0929 16:53:32.347780 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92eb6f89_2332_47d7_a04c_19e63442c882.slice/crio-3fbec02f916756a8b19d98d1a2522c2f3358f4fcf49e3b58113d61fb5c1a7f6f WatchSource:0}: Error finding container 3fbec02f916756a8b19d98d1a2522c2f3358f4fcf49e3b58113d61fb5c1a7f6f: Status 404 returned error can't find the container with id 3fbec02f916756a8b19d98d1a2522c2f3358f4fcf49e3b58113d61fb5c1a7f6f Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.360408 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.380822 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.405007 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.415890 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-49g4p"] Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.420557 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Sep 29 16:53:32 crc kubenswrapper[4592]: W0929 16:53:32.423045 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4394fc0_2772_479f_84e4_bbdb7d3b493a.slice/crio-772074c0b9eecb42785073724bfc4e64ddce0ba8cd61f9ab014aaf74604ba50c WatchSource:0}: Error finding container 772074c0b9eecb42785073724bfc4e64ddce0ba8cd61f9ab014aaf74604ba50c: Status 404 returned error can't find the container with id 772074c0b9eecb42785073724bfc4e64ddce0ba8cd61f9ab014aaf74604ba50c Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.441438 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.443485 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc"] Sep 29 16:53:32 crc kubenswrapper[4592]: W0929 16:53:32.465388 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d7df253_52f7_4764_8a61_fb4e2a389634.slice/crio-19cda14c85fbb44256e1b27c879b00f0551d25e349dff884fb39dcbcf63f6033 WatchSource:0}: Error finding container 19cda14c85fbb44256e1b27c879b00f0551d25e349dff884fb39dcbcf63f6033: Status 404 returned error can't find the container with id 19cda14c85fbb44256e1b27c879b00f0551d25e349dff884fb39dcbcf63f6033 Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.465492 4592 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.481280 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.483208 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-encryption-config\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.483311 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-etcd-client\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.483376 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.483401 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a89d7bfa-d740-4792-8fef-d71c8da7559e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-789qk\" (UID: \"a89d7bfa-d740-4792-8fef-d71c8da7559e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.483418 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-serving-cert\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.501017 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.521114 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.543357 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.555172 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"] Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.560584 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Sep 29 16:53:32 crc kubenswrapper[4592]: W0929 16:53:32.564656 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb9789af_7be4_40cf_a9da_df45fa8522f7.slice/crio-951f5c9751572196a1c5138272813ec6a4b6d9d7eefe52071970f219e6435e82 WatchSource:0}: Error finding container 
951f5c9751572196a1c5138272813ec6a4b6d9d7eefe52071970f219e6435e82: Status 404 returned error can't find the container with id 951f5c9751572196a1c5138272813ec6a4b6d9d7eefe52071970f219e6435e82 Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.580093 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.585040 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a89d7bfa-d740-4792-8fef-d71c8da7559e-config\") pod \"openshift-apiserver-operator-796bbdcf4f-789qk\" (UID: \"a89d7bfa-d740-4792-8fef-d71c8da7559e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.602907 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.620897 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.641368 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.661231 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.681901 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.701418 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.722253 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.741038 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.782949 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr624\" (UniqueName: \"kubernetes.io/projected/8bede80e-3f98-4ca7-be3d-20e7bc9ea19e-kube-api-access-fr624\") pod \"apiserver-76f77b778f-qtlgd\" (UID: \"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e\") " pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.796757 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5dhv\" (UniqueName: \"kubernetes.io/projected/ff85be0b-4fe9-43fa-941f-c00f69b7f459-kube-api-access-c5dhv\") pod \"control-plane-machine-set-operator-78cbb6b69f-m4z52\" (UID: \"ff85be0b-4fe9-43fa-941f-c00f69b7f459\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.812766 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hswdp\" (UniqueName: 
\"kubernetes.io/projected/81763dda-c34c-4bdf-a422-bbb5a76d8c95-kube-api-access-hswdp\") pod \"dns-operator-744455d44c-8gn4w\" (UID: \"81763dda-c34c-4bdf-a422-bbb5a76d8c95\") " pod="openshift-dns-operator/dns-operator-744455d44c-8gn4w" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.834337 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d645a2f8-5d71-4d9e-9bfa-487388f618ca-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-9dbw7\" (UID: \"d645a2f8-5d71-4d9e-9bfa-487388f618ca\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.855303 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxhp6\" (UniqueName: \"kubernetes.io/projected/7b20af08-54c7-4d8b-b2c2-6189a31c76e5-kube-api-access-rxhp6\") pod \"openshift-controller-manager-operator-756b6f6bc6-vjq8k\" (UID: \"7b20af08-54c7-4d8b-b2c2-6189a31c76e5\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.875749 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-49g4p" event={"ID":"e4394fc0-2772-479f-84e4-bbdb7d3b493a","Type":"ContainerStarted","Data":"12c3b51e2cf47790eb69de823dae4ffe59591d3652e5fd7f582ba28c5b49dfbe"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.876100 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-49g4p" event={"ID":"e4394fc0-2772-479f-84e4-bbdb7d3b493a","Type":"ContainerStarted","Data":"772074c0b9eecb42785073724bfc4e64ddce0ba8cd61f9ab014aaf74604ba50c"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.876364 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-49g4p" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.878293 4592 generic.go:334] "Generic (PLEG): container finished" podID="6d7df253-52f7-4764-8a61-fb4e2a389634" containerID="c4a8cd2336da4572ad73ac9a73b28aa7f4d4f1145f91f9980bbfddba61720415" exitCode=0 Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.878347 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc" event={"ID":"6d7df253-52f7-4764-8a61-fb4e2a389634","Type":"ContainerDied","Data":"c4a8cd2336da4572ad73ac9a73b28aa7f4d4f1145f91f9980bbfddba61720415"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.878363 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc" event={"ID":"6d7df253-52f7-4764-8a61-fb4e2a389634","Type":"ContainerStarted","Data":"19cda14c85fbb44256e1b27c879b00f0551d25e349dff884fb39dcbcf63f6033"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.880658 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" event={"ID":"92eb6f89-2332-47d7-a04c-19e63442c882","Type":"ContainerStarted","Data":"a4a32729af77a8eb7a373a7470ce27377256f77360eef84c39559f9a44e61a47"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.880702 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" 
event={"ID":"92eb6f89-2332-47d7-a04c-19e63442c882","Type":"ContainerStarted","Data":"9a1efdae8f8c5911f5b4eb80e7de18ccb2f58d4128e767fac245c0cd70deedf7"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.880715 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" event={"ID":"92eb6f89-2332-47d7-a04c-19e63442c882","Type":"ContainerStarted","Data":"3fbec02f916756a8b19d98d1a2522c2f3358f4fcf49e3b58113d61fb5c1a7f6f"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.883097 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" event={"ID":"e1aa3a51-f8e6-49a1-8013-74755f9c89b0","Type":"ContainerStarted","Data":"e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.883124 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" event={"ID":"e1aa3a51-f8e6-49a1-8013-74755f9c89b0","Type":"ContainerStarted","Data":"e6524c4652a919395c4ba730c343b61b4628090a6ccd610edb50a57c14b051a1"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.883655 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.883726 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.883753 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.885246 4592 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-5l86w container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" start-of-body= Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.885274 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" podUID="e1aa3a51-f8e6-49a1-8013-74755f9c89b0" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.888396 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" event={"ID":"bb9789af-7be4-40cf-a9da-df45fa8522f7","Type":"ContainerStarted","Data":"898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.888428 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" event={"ID":"bb9789af-7be4-40cf-a9da-df45fa8522f7","Type":"ContainerStarted","Data":"951f5c9751572196a1c5138272813ec6a4b6d9d7eefe52071970f219e6435e82"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.888925 
4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.890271 4592 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-g478g container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.890922 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" podUID="bb9789af-7be4-40cf-a9da-df45fa8522f7" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.893036 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-8pdqk" event={"ID":"3aac1447-7fa7-4b9c-bc79-e194dba65129","Type":"ContainerStarted","Data":"97b56dfa7b4d9efddf5a8dca0e494c2628197d93d3646082a6e2ac27dc09cfa0"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.893107 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-8pdqk" event={"ID":"3aac1447-7fa7-4b9c-bc79-e194dba65129","Type":"ContainerStarted","Data":"83edf0d1cddc94bcdd409714a90110939b51e2d3a422689f03a7e3ee533359d9"} Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.893394 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-lvzxc\" (UID: \"c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.893475 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-8pdqk" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.894581 4592 patch_prober.go:28] interesting pod/console-operator-58897d9998-8pdqk container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/readyz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.894610 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-8pdqk" podUID="3aac1447-7fa7-4b9c-bc79-e194dba65129" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.7:8443/readyz\": dial tcp 10.217.0.7:8443: connect: connection refused" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.901495 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfhj9\" (UniqueName: \"kubernetes.io/projected/57dcd71e-9bef-47f5-8512-d5eb7cd407b5-kube-api-access-bfhj9\") pod \"machine-approver-56656f9798-ggwzd\" (UID: \"57dcd71e-9bef-47f5-8512-d5eb7cd407b5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.919496 4592 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-sjq6d\" (UniqueName: \"kubernetes.io/projected/e031dd8a-d542-4dca-8bb7-12e36101c41e-kube-api-access-sjq6d\") pod \"authentication-operator-69f744f599-68w2v\" (UID: \"e031dd8a-d542-4dca-8bb7-12e36101c41e\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.935566 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.940822 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zrpz\" (UniqueName: \"kubernetes.io/projected/107b2d55-7d06-4091-b57c-bcf7c3635060-kube-api-access-8zrpz\") pod \"controller-manager-879f6c89f-g2gnz\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.957275 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5nlk\" (UniqueName: \"kubernetes.io/projected/b3554952-1f35-4ce9-9a10-1caa25c188fb-kube-api-access-f5nlk\") pod \"router-default-5444994796-t4tpk\" (UID: \"b3554952-1f35-4ce9-9a10-1caa25c188fb\") " pod="openshift-ingress/router-default-5444994796-t4tpk" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.959913 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.979758 4592 request.go:700] Waited for 1.852935459s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-cluster-samples-operator/serviceaccounts/cluster-samples-operator/token Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.981756 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v" Sep 29 16:53:32 crc kubenswrapper[4592]: I0929 16:53:32.985853 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdcpp\" (UniqueName: \"kubernetes.io/projected/440d74af-4040-42bf-83cd-e13fb8526d17-kube-api-access-jdcpp\") pod \"cluster-image-registry-operator-dc59b4c8b-vvfmq\" (UID: \"440d74af-4040-42bf-83cd-e13fb8526d17\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:32.999983 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hx2kn\" (UniqueName: \"kubernetes.io/projected/3650f5fa-7a17-4b65-8b55-5bb528beba58-kube-api-access-hx2kn\") pod \"cluster-samples-operator-665b6dd947-mczlm\" (UID: \"3650f5fa-7a17-4b65-8b55-5bb528beba58\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.007809 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.014131 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/440d74af-4040-42bf-83cd-e13fb8526d17-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-vvfmq\" (UID: \"440d74af-4040-42bf-83cd-e13fb8526d17\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.016537 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.023770 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.039798 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-8gn4w" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.044597 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvf9q\" (UniqueName: \"kubernetes.io/projected/5a608b5e-6d79-4439-adcf-7f2549890bba-kube-api-access-mvf9q\") pod \"machine-config-controller-84d6567774-48whw\" (UID: \"5a608b5e-6d79-4439-adcf-7f2549890bba\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.044984 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.046411 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-t4tpk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.066279 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.066394 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.071557 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.072936 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.082015 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Sep 29 16:53:33 crc kubenswrapper[4592]: E0929 16:53:33.093029 4592 projected.go:288] Couldn't get configMap openshift-oauth-apiserver/openshift-service-ca.crt: failed to sync configmap cache: timed out waiting for the condition Sep 29 16:53:33 crc kubenswrapper[4592]: E0929 16:53:33.093057 4592 projected.go:194] Error preparing data for projected volume kube-api-access-ctztn for pod openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk: failed to sync configmap cache: timed out waiting for the condition Sep 29 16:53:33 crc kubenswrapper[4592]: E0929 16:53:33.093127 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-kube-api-access-ctztn podName:34f42b2b-8a4b-41da-89cc-cd4da7edafe0 nodeName:}" failed. No retries permitted until 2025-09-29 16:53:33.593105631 +0000 UTC m=+143.740883312 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-ctztn" (UniqueName: "kubernetes.io/projected/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-kube-api-access-ctztn") pod "apiserver-7bbb656c7d-pb7hk" (UID: "34f42b2b-8a4b-41da-89cc-cd4da7edafe0") : failed to sync configmap cache: timed out waiting for the condition Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.101659 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.124101 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.141162 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.159742 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.164675 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.182561 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.203250 4592 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.203350 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.220848 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.244366 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.249968 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qtlgd"] Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.265590 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.280817 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.320768 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.340803 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.368141 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.371981 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k"] Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.377331 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a89d7bfa-d740-4792-8fef-d71c8da7559e-config\") pod \"openshift-apiserver-operator-796bbdcf4f-789qk\" (UID: \"a89d7bfa-d740-4792-8fef-d71c8da7559e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.381678 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.394642 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d4b30d14-aea8-4482-8220-81cf36dc8a93-registry-certificates\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.394691 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62388c6a-1394-47b9-90ac-fbb9aa780729-config\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.394716 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-oauth-serving-cert\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.401767 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405277 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4b30d14-aea8-4482-8220-81cf36dc8a93-trusted-ca\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405346 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/62388c6a-1394-47b9-90ac-fbb9aa780729-etcd-ca\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405394 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/62388c6a-1394-47b9-90ac-fbb9aa780729-etcd-client\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405416 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-service-ca\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405439 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62388c6a-1394-47b9-90ac-fbb9aa780729-serving-cert\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405465 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-bound-sa-token\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405517 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-registry-tls\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405590 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405617 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z6bs\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-kube-api-access-7z6bs\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405649 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glxzk\" (UniqueName: \"kubernetes.io/projected/e586a2e2-918f-40e6-b7eb-9e937dd20c32-kube-api-access-glxzk\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405679 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-config\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405713 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4zz7\" (UniqueName: \"kubernetes.io/projected/62388c6a-1394-47b9-90ac-fbb9aa780729-kube-api-access-g4zz7\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405793 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-serving-cert\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405847 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/62388c6a-1394-47b9-90ac-fbb9aa780729-etcd-service-ca\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405875 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-trusted-ca-bundle\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405916 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-oauth-config\") pod \"console-f9d7485db-zn6hr\" (UID: 
\"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.405960 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d4b30d14-aea8-4482-8220-81cf36dc8a93-installation-pull-secrets\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.406010 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d4b30d14-aea8-4482-8220-81cf36dc8a93-ca-trust-extracted\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: E0929 16:53:33.408096 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:33.908076434 +0000 UTC m=+144.055854175 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.424249 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.425038 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.429871 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-serving-cert\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.460687 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.474240 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-encryption-config\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.482588 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Sep 29 16:53:33 crc kubenswrapper[4592]: E0929 
16:53:33.483626 4592 secret.go:188] Couldn't get secret openshift-apiserver-operator/openshift-apiserver-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Sep 29 16:53:33 crc kubenswrapper[4592]: E0929 16:53:33.483686 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a89d7bfa-d740-4792-8fef-d71c8da7559e-serving-cert podName:a89d7bfa-d740-4792-8fef-d71c8da7559e nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.483669458 +0000 UTC m=+144.631447139 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/a89d7bfa-d740-4792-8fef-d71c8da7559e-serving-cert") pod "openshift-apiserver-operator-796bbdcf4f-789qk" (UID: "a89d7bfa-d740-4792-8fef-d71c8da7559e") : failed to sync secret cache: timed out waiting for the condition Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.491598 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-etcd-client\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.500956 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.507633 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.507804 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htb5d\" (UniqueName: \"kubernetes.io/projected/0bd227af-149e-4e98-bb4a-ce3fab42d945-kube-api-access-htb5d\") pod \"service-ca-operator-777779d784-d78kk\" (UID: \"0bd227af-149e-4e98-bb4a-ce3fab42d945\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.507885 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-trusted-ca-bundle\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.507906 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1faaded6-9ed5-4eef-9df7-bd5d8363ea14-srv-cert\") pod \"olm-operator-6b444d44fb-hmszm\" (UID: \"1faaded6-9ed5-4eef-9df7-bd5d8363ea14\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.507928 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hlw8\" (UniqueName: \"kubernetes.io/projected/25870e3b-7737-4e6a-9ac7-a003d45c140b-kube-api-access-7hlw8\") pod \"collect-profiles-29319405-6822s\" (UID: \"25870e3b-7737-4e6a-9ac7-a003d45c140b\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.507948 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fv7hf\" (UniqueName: \"kubernetes.io/projected/bc578100-b929-4249-bd7e-de64d0469bb9-kube-api-access-fv7hf\") pod \"package-server-manager-789f6589d5-7h6m6\" (UID: \"bc578100-b929-4249-bd7e-de64d0469bb9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.507968 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a286ddd1-247b-4af4-b410-ad78a8d94595-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-99jhd\" (UID: \"a286ddd1-247b-4af4-b410-ad78a8d94595\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508000 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7022d695-153c-4fe6-a030-618c0dd54768-proxy-tls\") pod \"machine-config-operator-74547568cd-nncw8\" (UID: \"7022d695-153c-4fe6-a030-618c0dd54768\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508031 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-oauth-config\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508049 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d5157b08-9af0-4a76-a6a1-351020294cd0-trusted-ca\") pod \"ingress-operator-5b745b69d9-rjjgh\" (UID: \"d5157b08-9af0-4a76-a6a1-351020294cd0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508071 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/438aeabd-9678-4e33-8367-1008713f7438-profile-collector-cert\") pod \"catalog-operator-68c6474976-msxgb\" (UID: \"438aeabd-9678-4e33-8367-1008713f7438\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508090 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/542d754d-bd15-40b7-8208-876f318413a9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hwj5f\" (UID: \"542d754d-bd15-40b7-8208-876f318413a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508108 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d5157b08-9af0-4a76-a6a1-351020294cd0-bound-sa-token\") pod \"ingress-operator-5b745b69d9-rjjgh\" (UID: 
\"d5157b08-9af0-4a76-a6a1-351020294cd0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508128 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3f348c1f-2b11-4c89-a122-4b873c114126-certs\") pod \"machine-config-server-npwm9\" (UID: \"3f348c1f-2b11-4c89-a122-4b873c114126\") " pod="openshift-machine-config-operator/machine-config-server-npwm9" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508293 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhgv2\" (UniqueName: \"kubernetes.io/projected/3f348c1f-2b11-4c89-a122-4b873c114126-kube-api-access-lhgv2\") pod \"machine-config-server-npwm9\" (UID: \"3f348c1f-2b11-4c89-a122-4b873c114126\") " pod="openshift-machine-config-operator/machine-config-server-npwm9" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508333 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a286ddd1-247b-4af4-b410-ad78a8d94595-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-99jhd\" (UID: \"a286ddd1-247b-4af4-b410-ad78a8d94595\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508370 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d4b30d14-aea8-4482-8220-81cf36dc8a93-ca-trust-extracted\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508452 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/3ee0e3db-f2c3-4b37-a024-8759009ed1df-signing-key\") pod \"service-ca-9c57cc56f-tsm97\" (UID: \"3ee0e3db-f2c3-4b37-a024-8759009ed1df\") " pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508486 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d4b30d14-aea8-4482-8220-81cf36dc8a93-registry-certificates\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508530 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-oauth-serving-cert\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508548 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9ttl\" (UniqueName: \"kubernetes.io/projected/df0fe5a9-e5dd-40b0-8d51-addb3b8c8865-kube-api-access-w9ttl\") pod \"multus-admission-controller-857f4d67dd-46ndj\" (UID: \"df0fe5a9-e5dd-40b0-8d51-addb3b8c8865\") " 
pod="openshift-multus/multus-admission-controller-857f4d67dd-46ndj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508563 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k86jt\" (UniqueName: \"kubernetes.io/projected/d5157b08-9af0-4a76-a6a1-351020294cd0-kube-api-access-k86jt\") pod \"ingress-operator-5b745b69d9-rjjgh\" (UID: \"d5157b08-9af0-4a76-a6a1-351020294cd0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508578 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-registration-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508601 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a3a24f23-fa27-4ee5-9899-39aac4ec8dcd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-4m5cj\" (UID: \"a3a24f23-fa27-4ee5-9899-39aac4ec8dcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508616 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/94d01a53-233d-4e33-83c5-64a6200807b0-metrics-tls\") pod \"dns-default-4sx67\" (UID: \"94d01a53-233d-4e33-83c5-64a6200807b0\") " pod="openshift-dns/dns-default-4sx67" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508673 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4b30d14-aea8-4482-8220-81cf36dc8a93-trusted-ca\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508698 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/62388c6a-1394-47b9-90ac-fbb9aa780729-etcd-ca\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508766 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-service-ca\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508794 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1faaded6-9ed5-4eef-9df7-bd5d8363ea14-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hmszm\" (UID: \"1faaded6-9ed5-4eef-9df7-bd5d8363ea14\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508814 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-lgw5p\" (UniqueName: \"kubernetes.io/projected/642112d6-34bb-4f6a-aafc-2be7ca427dd7-kube-api-access-lgw5p\") pod \"migrator-59844c95c7-d4pxx\" (UID: \"642112d6-34bb-4f6a-aafc-2be7ca427dd7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d4pxx" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508876 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/df0fe5a9-e5dd-40b0-8d51-addb3b8c8865-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-46ndj\" (UID: \"df0fe5a9-e5dd-40b0-8d51-addb3b8c8865\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-46ndj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508891 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25870e3b-7737-4e6a-9ac7-a003d45c140b-config-volume\") pod \"collect-profiles-29319405-6822s\" (UID: \"25870e3b-7737-4e6a-9ac7-a003d45c140b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508963 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z6bs\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-kube-api-access-7z6bs\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508980 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd227af-149e-4e98-bb4a-ce3fab42d945-config\") pod \"service-ca-operator-777779d784-d78kk\" (UID: \"0bd227af-149e-4e98-bb4a-ce3fab42d945\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.508997 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flgms\" (UniqueName: \"kubernetes.io/projected/efea8fdd-ab02-401a-b724-b66032ca838f-kube-api-access-flgms\") pod \"ingress-canary-jfwcc\" (UID: \"efea8fdd-ab02-401a-b724-b66032ca838f\") " pod="openshift-ingress-canary/ingress-canary-jfwcc" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.509022 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7022d695-153c-4fe6-a030-618c0dd54768-images\") pod \"machine-config-operator-74547568cd-nncw8\" (UID: \"7022d695-153c-4fe6-a030-618c0dd54768\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.509055 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4zz7\" (UniqueName: \"kubernetes.io/projected/62388c6a-1394-47b9-90ac-fbb9aa780729-kube-api-access-g4zz7\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.509094 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: 
\"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-mountpoint-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.509112 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flqvh\" (UniqueName: \"kubernetes.io/projected/542d754d-bd15-40b7-8208-876f318413a9-kube-api-access-flqvh\") pod \"marketplace-operator-79b997595-hwj5f\" (UID: \"542d754d-bd15-40b7-8208-876f318413a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.509127 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a3a24f23-fa27-4ee5-9899-39aac4ec8dcd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-4m5cj\" (UID: \"a3a24f23-fa27-4ee5-9899-39aac4ec8dcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.509176 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/62388c6a-1394-47b9-90ac-fbb9aa780729-etcd-service-ca\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.509191 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7022d695-153c-4fe6-a030-618c0dd54768-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nncw8\" (UID: \"7022d695-153c-4fe6-a030-618c0dd54768\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.509234 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv5dp\" (UniqueName: \"kubernetes.io/projected/94d01a53-233d-4e33-83c5-64a6200807b0-kube-api-access-wv5dp\") pod \"dns-default-4sx67\" (UID: \"94d01a53-233d-4e33-83c5-64a6200807b0\") " pod="openshift-dns/dns-default-4sx67" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.509250 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmjql\" (UniqueName: \"kubernetes.io/projected/df3b90d4-ee10-47a5-a84f-5beb55894684-kube-api-access-kmjql\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.509273 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/438aeabd-9678-4e33-8367-1008713f7438-srv-cert\") pod \"catalog-operator-68c6474976-msxgb\" (UID: \"438aeabd-9678-4e33-8367-1008713f7438\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.509287 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-csi-data-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: 
\"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.509307 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0bd227af-149e-4e98-bb4a-ce3fab42d945-serving-cert\") pod \"service-ca-operator-777779d784-d78kk\" (UID: \"0bd227af-149e-4e98-bb4a-ce3fab42d945\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" Sep 29 16:53:33 crc kubenswrapper[4592]: E0929 16:53:33.509960 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.009943558 +0000 UTC m=+144.157721239 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.513888 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d5157b08-9af0-4a76-a6a1-351020294cd0-metrics-tls\") pod \"ingress-operator-5b745b69d9-rjjgh\" (UID: \"d5157b08-9af0-4a76-a6a1-351020294cd0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.513949 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d4b30d14-aea8-4482-8220-81cf36dc8a93-installation-pull-secrets\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.514040 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckqzf\" (UniqueName: \"kubernetes.io/projected/e001240c-c2f8-4102-9a25-8e4e16b1a07d-kube-api-access-ckqzf\") pod \"packageserver-d55dfcdfc-zjh4r\" (UID: \"e001240c-c2f8-4102-9a25-8e4e16b1a07d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.514159 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4b30d14-aea8-4482-8220-81cf36dc8a93-trusted-ca\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.514605 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d4b30d14-aea8-4482-8220-81cf36dc8a93-ca-trust-extracted\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.515052 
4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/62388c6a-1394-47b9-90ac-fbb9aa780729-etcd-service-ca\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.515873 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-service-ca\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.516318 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e001240c-c2f8-4102-9a25-8e4e16b1a07d-webhook-cert\") pod \"packageserver-d55dfcdfc-zjh4r\" (UID: \"e001240c-c2f8-4102-9a25-8e4e16b1a07d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.516424 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62388c6a-1394-47b9-90ac-fbb9aa780729-config\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.516499 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e001240c-c2f8-4102-9a25-8e4e16b1a07d-tmpfs\") pod \"packageserver-d55dfcdfc-zjh4r\" (UID: \"e001240c-c2f8-4102-9a25-8e4e16b1a07d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.516523 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3a24f23-fa27-4ee5-9899-39aac4ec8dcd-config\") pod \"kube-apiserver-operator-766d6c64bb-4m5cj\" (UID: \"a3a24f23-fa27-4ee5-9899-39aac4ec8dcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.516552 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e001240c-c2f8-4102-9a25-8e4e16b1a07d-apiservice-cert\") pod \"packageserver-d55dfcdfc-zjh4r\" (UID: \"e001240c-c2f8-4102-9a25-8e4e16b1a07d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.516580 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsrcf\" (UniqueName: \"kubernetes.io/projected/3ee0e3db-f2c3-4b37-a024-8759009ed1df-kube-api-access-hsrcf\") pod \"service-ca-9c57cc56f-tsm97\" (UID: \"3ee0e3db-f2c3-4b37-a024-8759009ed1df\") " pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.517383 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-oauth-config\") pod \"console-f9d7485db-zn6hr\" (UID: 
\"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.518463 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-trusted-ca-bundle\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.518831 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d4b30d14-aea8-4482-8220-81cf36dc8a93-registry-certificates\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.518905 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/62388c6a-1394-47b9-90ac-fbb9aa780729-etcd-client\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.518937 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-bound-sa-token\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.518962 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62388c6a-1394-47b9-90ac-fbb9aa780729-serving-cert\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.518989 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/bc578100-b929-4249-bd7e-de64d0469bb9-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-7h6m6\" (UID: \"bc578100-b929-4249-bd7e-de64d0469bb9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.523595 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/efea8fdd-ab02-401a-b724-b66032ca838f-cert\") pod \"ingress-canary-jfwcc\" (UID: \"efea8fdd-ab02-401a-b724-b66032ca838f\") " pod="openshift-ingress-canary/ingress-canary-jfwcc" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.523947 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-registry-tls\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.524014 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"socket-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-socket-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.526016 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62388c6a-1394-47b9-90ac-fbb9aa780729-config\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.526611 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/62388c6a-1394-47b9-90ac-fbb9aa780729-etcd-client\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.526814 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.526850 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/3ee0e3db-f2c3-4b37-a024-8759009ed1df-signing-cabundle\") pod \"service-ca-9c57cc56f-tsm97\" (UID: \"3ee0e3db-f2c3-4b37-a024-8759009ed1df\") " pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.526992 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/542d754d-bd15-40b7-8208-876f318413a9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hwj5f\" (UID: \"542d754d-bd15-40b7-8208-876f318413a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.527040 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3f348c1f-2b11-4c89-a122-4b873c114126-node-bootstrap-token\") pod \"machine-config-server-npwm9\" (UID: \"3f348c1f-2b11-4c89-a122-4b873c114126\") " pod="openshift-machine-config-operator/machine-config-server-npwm9" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.527100 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glxzk\" (UniqueName: \"kubernetes.io/projected/e586a2e2-918f-40e6-b7eb-9e937dd20c32-kube-api-access-glxzk\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.527126 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25870e3b-7737-4e6a-9ac7-a003d45c140b-secret-volume\") pod \"collect-profiles-29319405-6822s\" (UID: \"25870e3b-7737-4e6a-9ac7-a003d45c140b\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.527319 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-registry-tls\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.527407 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-config\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.527480 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgx9p\" (UniqueName: \"kubernetes.io/projected/1faaded6-9ed5-4eef-9df7-bd5d8363ea14-kube-api-access-lgx9p\") pod \"olm-operator-6b444d44fb-hmszm\" (UID: \"1faaded6-9ed5-4eef-9df7-bd5d8363ea14\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.527511 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94d01a53-233d-4e33-83c5-64a6200807b0-config-volume\") pod \"dns-default-4sx67\" (UID: \"94d01a53-233d-4e33-83c5-64a6200807b0\") " pod="openshift-dns/dns-default-4sx67" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.527533 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn9hc\" (UniqueName: \"kubernetes.io/projected/a286ddd1-247b-4af4-b410-ad78a8d94595-kube-api-access-mn9hc\") pod \"kube-storage-version-migrator-operator-b67b599dd-99jhd\" (UID: \"a286ddd1-247b-4af4-b410-ad78a8d94595\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.527571 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-plugins-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.527601 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4wgz\" (UniqueName: \"kubernetes.io/projected/7022d695-153c-4fe6-a030-618c0dd54768-kube-api-access-q4wgz\") pod \"machine-config-operator-74547568cd-nncw8\" (UID: \"7022d695-153c-4fe6-a030-618c0dd54768\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" Sep 29 16:53:33 crc kubenswrapper[4592]: E0929 16:53:33.527831 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.027818469 +0000 UTC m=+144.175596230 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.528055 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjs8b\" (UniqueName: \"kubernetes.io/projected/438aeabd-9678-4e33-8367-1008713f7438-kube-api-access-hjs8b\") pod \"catalog-operator-68c6474976-msxgb\" (UID: \"438aeabd-9678-4e33-8367-1008713f7438\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.529054 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-serving-cert\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.530463 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-oauth-serving-cert\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.531990 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62388c6a-1394-47b9-90ac-fbb9aa780729-serving-cert\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.554050 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/62388c6a-1394-47b9-90ac-fbb9aa780729-etcd-ca\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.554218 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-config\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.555616 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-serving-cert\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.565966 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d4b30d14-aea8-4482-8220-81cf36dc8a93-installation-pull-secrets\") pod \"image-registry-697d97f7c8-w748k\" (UID: 
\"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.581040 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z6bs\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-kube-api-access-7z6bs\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.588719 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4zz7\" (UniqueName: \"kubernetes.io/projected/62388c6a-1394-47b9-90ac-fbb9aa780729-kube-api-access-g4zz7\") pod \"etcd-operator-b45778765-8q6sl\" (UID: \"62388c6a-1394-47b9-90ac-fbb9aa780729\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.602758 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-bound-sa-token\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.628026 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glxzk\" (UniqueName: \"kubernetes.io/projected/e586a2e2-918f-40e6-b7eb-9e937dd20c32-kube-api-access-glxzk\") pod \"console-f9d7485db-zn6hr\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") " pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.635587 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.635949 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a286ddd1-247b-4af4-b410-ad78a8d94595-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-99jhd\" (UID: \"a286ddd1-247b-4af4-b410-ad78a8d94595\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.636031 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/3ee0e3db-f2c3-4b37-a024-8759009ed1df-signing-key\") pod \"service-ca-9c57cc56f-tsm97\" (UID: \"3ee0e3db-f2c3-4b37-a024-8759009ed1df\") " pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.636096 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-registration-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.639482 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctztn\" 
(UniqueName: \"kubernetes.io/projected/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-kube-api-access-ctztn\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.639642 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9ttl\" (UniqueName: \"kubernetes.io/projected/df0fe5a9-e5dd-40b0-8d51-addb3b8c8865-kube-api-access-w9ttl\") pod \"multus-admission-controller-857f4d67dd-46ndj\" (UID: \"df0fe5a9-e5dd-40b0-8d51-addb3b8c8865\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-46ndj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.639712 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k86jt\" (UniqueName: \"kubernetes.io/projected/d5157b08-9af0-4a76-a6a1-351020294cd0-kube-api-access-k86jt\") pod \"ingress-operator-5b745b69d9-rjjgh\" (UID: \"d5157b08-9af0-4a76-a6a1-351020294cd0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.639777 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a3a24f23-fa27-4ee5-9899-39aac4ec8dcd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-4m5cj\" (UID: \"a3a24f23-fa27-4ee5-9899-39aac4ec8dcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.639839 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/94d01a53-233d-4e33-83c5-64a6200807b0-metrics-tls\") pod \"dns-default-4sx67\" (UID: \"94d01a53-233d-4e33-83c5-64a6200807b0\") " pod="openshift-dns/dns-default-4sx67" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.639925 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1faaded6-9ed5-4eef-9df7-bd5d8363ea14-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hmszm\" (UID: \"1faaded6-9ed5-4eef-9df7-bd5d8363ea14\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.639994 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgw5p\" (UniqueName: \"kubernetes.io/projected/642112d6-34bb-4f6a-aafc-2be7ca427dd7-kube-api-access-lgw5p\") pod \"migrator-59844c95c7-d4pxx\" (UID: \"642112d6-34bb-4f6a-aafc-2be7ca427dd7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d4pxx" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.640100 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/df0fe5a9-e5dd-40b0-8d51-addb3b8c8865-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-46ndj\" (UID: \"df0fe5a9-e5dd-40b0-8d51-addb3b8c8865\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-46ndj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.640241 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25870e3b-7737-4e6a-9ac7-a003d45c140b-config-volume\") pod \"collect-profiles-29319405-6822s\" (UID: \"25870e3b-7737-4e6a-9ac7-a003d45c140b\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.640331 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd227af-149e-4e98-bb4a-ce3fab42d945-config\") pod \"service-ca-operator-777779d784-d78kk\" (UID: \"0bd227af-149e-4e98-bb4a-ce3fab42d945\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.640404 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flgms\" (UniqueName: \"kubernetes.io/projected/efea8fdd-ab02-401a-b724-b66032ca838f-kube-api-access-flgms\") pod \"ingress-canary-jfwcc\" (UID: \"efea8fdd-ab02-401a-b724-b66032ca838f\") " pod="openshift-ingress-canary/ingress-canary-jfwcc" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.640470 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7022d695-153c-4fe6-a030-618c0dd54768-images\") pod \"machine-config-operator-74547568cd-nncw8\" (UID: \"7022d695-153c-4fe6-a030-618c0dd54768\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.640544 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-mountpoint-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.640610 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flqvh\" (UniqueName: \"kubernetes.io/projected/542d754d-bd15-40b7-8208-876f318413a9-kube-api-access-flqvh\") pod \"marketplace-operator-79b997595-hwj5f\" (UID: \"542d754d-bd15-40b7-8208-876f318413a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.640671 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a3a24f23-fa27-4ee5-9899-39aac4ec8dcd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-4m5cj\" (UID: \"a3a24f23-fa27-4ee5-9899-39aac4ec8dcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.640762 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7022d695-153c-4fe6-a030-618c0dd54768-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nncw8\" (UID: \"7022d695-153c-4fe6-a030-618c0dd54768\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.640841 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv5dp\" (UniqueName: \"kubernetes.io/projected/94d01a53-233d-4e33-83c5-64a6200807b0-kube-api-access-wv5dp\") pod \"dns-default-4sx67\" (UID: \"94d01a53-233d-4e33-83c5-64a6200807b0\") " pod="openshift-dns/dns-default-4sx67" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.640903 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-kmjql\" (UniqueName: \"kubernetes.io/projected/df3b90d4-ee10-47a5-a84f-5beb55894684-kube-api-access-kmjql\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.640969 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/438aeabd-9678-4e33-8367-1008713f7438-srv-cert\") pod \"catalog-operator-68c6474976-msxgb\" (UID: \"438aeabd-9678-4e33-8367-1008713f7438\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641035 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-csi-data-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641099 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0bd227af-149e-4e98-bb4a-ce3fab42d945-serving-cert\") pod \"service-ca-operator-777779d784-d78kk\" (UID: \"0bd227af-149e-4e98-bb4a-ce3fab42d945\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641180 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d5157b08-9af0-4a76-a6a1-351020294cd0-metrics-tls\") pod \"ingress-operator-5b745b69d9-rjjgh\" (UID: \"d5157b08-9af0-4a76-a6a1-351020294cd0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641261 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckqzf\" (UniqueName: \"kubernetes.io/projected/e001240c-c2f8-4102-9a25-8e4e16b1a07d-kube-api-access-ckqzf\") pod \"packageserver-d55dfcdfc-zjh4r\" (UID: \"e001240c-c2f8-4102-9a25-8e4e16b1a07d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641353 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e001240c-c2f8-4102-9a25-8e4e16b1a07d-webhook-cert\") pod \"packageserver-d55dfcdfc-zjh4r\" (UID: \"e001240c-c2f8-4102-9a25-8e4e16b1a07d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641429 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e001240c-c2f8-4102-9a25-8e4e16b1a07d-tmpfs\") pod \"packageserver-d55dfcdfc-zjh4r\" (UID: \"e001240c-c2f8-4102-9a25-8e4e16b1a07d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641510 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3a24f23-fa27-4ee5-9899-39aac4ec8dcd-config\") pod \"kube-apiserver-operator-766d6c64bb-4m5cj\" (UID: \"a3a24f23-fa27-4ee5-9899-39aac4ec8dcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" Sep 
29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641575 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e001240c-c2f8-4102-9a25-8e4e16b1a07d-apiservice-cert\") pod \"packageserver-d55dfcdfc-zjh4r\" (UID: \"e001240c-c2f8-4102-9a25-8e4e16b1a07d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641637 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsrcf\" (UniqueName: \"kubernetes.io/projected/3ee0e3db-f2c3-4b37-a024-8759009ed1df-kube-api-access-hsrcf\") pod \"service-ca-9c57cc56f-tsm97\" (UID: \"3ee0e3db-f2c3-4b37-a024-8759009ed1df\") " pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641706 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/bc578100-b929-4249-bd7e-de64d0469bb9-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-7h6m6\" (UID: \"bc578100-b929-4249-bd7e-de64d0469bb9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641777 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/efea8fdd-ab02-401a-b724-b66032ca838f-cert\") pod \"ingress-canary-jfwcc\" (UID: \"efea8fdd-ab02-401a-b724-b66032ca838f\") " pod="openshift-ingress-canary/ingress-canary-jfwcc" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641861 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-socket-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641932 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/3ee0e3db-f2c3-4b37-a024-8759009ed1df-signing-cabundle\") pod \"service-ca-9c57cc56f-tsm97\" (UID: \"3ee0e3db-f2c3-4b37-a024-8759009ed1df\") " pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.641996 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/542d754d-bd15-40b7-8208-876f318413a9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hwj5f\" (UID: \"542d754d-bd15-40b7-8208-876f318413a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642067 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3f348c1f-2b11-4c89-a122-4b873c114126-node-bootstrap-token\") pod \"machine-config-server-npwm9\" (UID: \"3f348c1f-2b11-4c89-a122-4b873c114126\") " pod="openshift-machine-config-operator/machine-config-server-npwm9" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642138 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/25870e3b-7737-4e6a-9ac7-a003d45c140b-secret-volume\") pod \"collect-profiles-29319405-6822s\" (UID: \"25870e3b-7737-4e6a-9ac7-a003d45c140b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642231 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-plugins-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642293 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4wgz\" (UniqueName: \"kubernetes.io/projected/7022d695-153c-4fe6-a030-618c0dd54768-kube-api-access-q4wgz\") pod \"machine-config-operator-74547568cd-nncw8\" (UID: \"7022d695-153c-4fe6-a030-618c0dd54768\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642355 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgx9p\" (UniqueName: \"kubernetes.io/projected/1faaded6-9ed5-4eef-9df7-bd5d8363ea14-kube-api-access-lgx9p\") pod \"olm-operator-6b444d44fb-hmszm\" (UID: \"1faaded6-9ed5-4eef-9df7-bd5d8363ea14\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642426 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94d01a53-233d-4e33-83c5-64a6200807b0-config-volume\") pod \"dns-default-4sx67\" (UID: \"94d01a53-233d-4e33-83c5-64a6200807b0\") " pod="openshift-dns/dns-default-4sx67" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642497 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn9hc\" (UniqueName: \"kubernetes.io/projected/a286ddd1-247b-4af4-b410-ad78a8d94595-kube-api-access-mn9hc\") pod \"kube-storage-version-migrator-operator-b67b599dd-99jhd\" (UID: \"a286ddd1-247b-4af4-b410-ad78a8d94595\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642567 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjs8b\" (UniqueName: \"kubernetes.io/projected/438aeabd-9678-4e33-8367-1008713f7438-kube-api-access-hjs8b\") pod \"catalog-operator-68c6474976-msxgb\" (UID: \"438aeabd-9678-4e33-8367-1008713f7438\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642660 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htb5d\" (UniqueName: \"kubernetes.io/projected/0bd227af-149e-4e98-bb4a-ce3fab42d945-kube-api-access-htb5d\") pod \"service-ca-operator-777779d784-d78kk\" (UID: \"0bd227af-149e-4e98-bb4a-ce3fab42d945\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642725 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1faaded6-9ed5-4eef-9df7-bd5d8363ea14-srv-cert\") pod \"olm-operator-6b444d44fb-hmszm\" (UID: 
\"1faaded6-9ed5-4eef-9df7-bd5d8363ea14\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642796 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hlw8\" (UniqueName: \"kubernetes.io/projected/25870e3b-7737-4e6a-9ac7-a003d45c140b-kube-api-access-7hlw8\") pod \"collect-profiles-29319405-6822s\" (UID: \"25870e3b-7737-4e6a-9ac7-a003d45c140b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642860 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fv7hf\" (UniqueName: \"kubernetes.io/projected/bc578100-b929-4249-bd7e-de64d0469bb9-kube-api-access-fv7hf\") pod \"package-server-manager-789f6589d5-7h6m6\" (UID: \"bc578100-b929-4249-bd7e-de64d0469bb9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642921 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a286ddd1-247b-4af4-b410-ad78a8d94595-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-99jhd\" (UID: \"a286ddd1-247b-4af4-b410-ad78a8d94595\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.642982 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7022d695-153c-4fe6-a030-618c0dd54768-proxy-tls\") pod \"machine-config-operator-74547568cd-nncw8\" (UID: \"7022d695-153c-4fe6-a030-618c0dd54768\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.643055 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d5157b08-9af0-4a76-a6a1-351020294cd0-trusted-ca\") pod \"ingress-operator-5b745b69d9-rjjgh\" (UID: \"d5157b08-9af0-4a76-a6a1-351020294cd0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.643122 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/438aeabd-9678-4e33-8367-1008713f7438-profile-collector-cert\") pod \"catalog-operator-68c6474976-msxgb\" (UID: \"438aeabd-9678-4e33-8367-1008713f7438\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.643208 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/542d754d-bd15-40b7-8208-876f318413a9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hwj5f\" (UID: \"542d754d-bd15-40b7-8208-876f318413a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.643272 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d5157b08-9af0-4a76-a6a1-351020294cd0-bound-sa-token\") pod \"ingress-operator-5b745b69d9-rjjgh\" (UID: \"d5157b08-9af0-4a76-a6a1-351020294cd0\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.643332 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3f348c1f-2b11-4c89-a122-4b873c114126-certs\") pod \"machine-config-server-npwm9\" (UID: \"3f348c1f-2b11-4c89-a122-4b873c114126\") " pod="openshift-machine-config-operator/machine-config-server-npwm9" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.643403 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhgv2\" (UniqueName: \"kubernetes.io/projected/3f348c1f-2b11-4c89-a122-4b873c114126-kube-api-access-lhgv2\") pod \"machine-config-server-npwm9\" (UID: \"3f348c1f-2b11-4c89-a122-4b873c114126\") " pod="openshift-machine-config-operator/machine-config-server-npwm9" Sep 29 16:53:33 crc kubenswrapper[4592]: E0929 16:53:33.643853 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.143833974 +0000 UTC m=+144.291611655 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.650203 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/e001240c-c2f8-4102-9a25-8e4e16b1a07d-tmpfs\") pod \"packageserver-d55dfcdfc-zjh4r\" (UID: \"e001240c-c2f8-4102-9a25-8e4e16b1a07d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.651185 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a286ddd1-247b-4af4-b410-ad78a8d94595-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-99jhd\" (UID: \"a286ddd1-247b-4af4-b410-ad78a8d94595\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.653122 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-registration-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.654970 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a3a24f23-fa27-4ee5-9899-39aac4ec8dcd-config\") pod \"kube-apiserver-operator-766d6c64bb-4m5cj\" (UID: \"a3a24f23-fa27-4ee5-9899-39aac4ec8dcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.656380 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.657140 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-mountpoint-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.658879 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7022d695-153c-4fe6-a030-618c0dd54768-images\") pod \"machine-config-operator-74547568cd-nncw8\" (UID: \"7022d695-153c-4fe6-a030-618c0dd54768\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.661572 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7022d695-153c-4fe6-a030-618c0dd54768-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nncw8\" (UID: \"7022d695-153c-4fe6-a030-618c0dd54768\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.661572 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a286ddd1-247b-4af4-b410-ad78a8d94595-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-99jhd\" (UID: \"a286ddd1-247b-4af4-b410-ad78a8d94595\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.673487 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0bd227af-149e-4e98-bb4a-ce3fab42d945-config\") pod \"service-ca-operator-777779d784-d78kk\" (UID: \"0bd227af-149e-4e98-bb4a-ce3fab42d945\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.678854 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25870e3b-7737-4e6a-9ac7-a003d45c140b-config-volume\") pod \"collect-profiles-29319405-6822s\" (UID: \"25870e3b-7737-4e6a-9ac7-a003d45c140b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.680665 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/df0fe5a9-e5dd-40b0-8d51-addb3b8c8865-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-46ndj\" (UID: \"df0fe5a9-e5dd-40b0-8d51-addb3b8c8865\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-46ndj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.686305 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-csi-data-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.689750 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/d5157b08-9af0-4a76-a6a1-351020294cd0-trusted-ca\") pod \"ingress-operator-5b745b69d9-rjjgh\" (UID: \"d5157b08-9af0-4a76-a6a1-351020294cd0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.694952 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm"] Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.695026 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52"] Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.695384 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0bd227af-149e-4e98-bb4a-ce3fab42d945-serving-cert\") pod \"service-ca-operator-777779d784-d78kk\" (UID: \"0bd227af-149e-4e98-bb4a-ce3fab42d945\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.698402 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/542d754d-bd15-40b7-8208-876f318413a9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hwj5f\" (UID: \"542d754d-bd15-40b7-8208-876f318413a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.701622 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-socket-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.702113 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7022d695-153c-4fe6-a030-618c0dd54768-proxy-tls\") pod \"machine-config-operator-74547568cd-nncw8\" (UID: \"7022d695-153c-4fe6-a030-618c0dd54768\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.702365 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhgv2\" (UniqueName: \"kubernetes.io/projected/3f348c1f-2b11-4c89-a122-4b873c114126-kube-api-access-lhgv2\") pod \"machine-config-server-npwm9\" (UID: \"3f348c1f-2b11-4c89-a122-4b873c114126\") " pod="openshift-machine-config-operator/machine-config-server-npwm9" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.702496 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/df3b90d4-ee10-47a5-a84f-5beb55894684-plugins-dir\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.703330 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/3ee0e3db-f2c3-4b37-a024-8759009ed1df-signing-cabundle\") pod \"service-ca-9c57cc56f-tsm97\" (UID: \"3ee0e3db-f2c3-4b37-a024-8759009ed1df\") " pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.703531 4592 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/438aeabd-9678-4e33-8367-1008713f7438-srv-cert\") pod \"catalog-operator-68c6474976-msxgb\" (UID: \"438aeabd-9678-4e33-8367-1008713f7438\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.705338 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1faaded6-9ed5-4eef-9df7-bd5d8363ea14-srv-cert\") pod \"olm-operator-6b444d44fb-hmszm\" (UID: \"1faaded6-9ed5-4eef-9df7-bd5d8363ea14\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.705857 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/94d01a53-233d-4e33-83c5-64a6200807b0-metrics-tls\") pod \"dns-default-4sx67\" (UID: \"94d01a53-233d-4e33-83c5-64a6200807b0\") " pod="openshift-dns/dns-default-4sx67" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.712944 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/94d01a53-233d-4e33-83c5-64a6200807b0-config-volume\") pod \"dns-default-4sx67\" (UID: \"94d01a53-233d-4e33-83c5-64a6200807b0\") " pod="openshift-dns/dns-default-4sx67" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.713487 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/efea8fdd-ab02-401a-b724-b66032ca838f-cert\") pod \"ingress-canary-jfwcc\" (UID: \"efea8fdd-ab02-401a-b724-b66032ca838f\") " pod="openshift-ingress-canary/ingress-canary-jfwcc" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.717668 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g2gnz"] Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.717707 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-48whw"] Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.725104 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e001240c-c2f8-4102-9a25-8e4e16b1a07d-webhook-cert\") pod \"packageserver-d55dfcdfc-zjh4r\" (UID: \"e001240c-c2f8-4102-9a25-8e4e16b1a07d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.730009 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3f348c1f-2b11-4c89-a122-4b873c114126-node-bootstrap-token\") pod \"machine-config-server-npwm9\" (UID: \"3f348c1f-2b11-4c89-a122-4b873c114126\") " pod="openshift-machine-config-operator/machine-config-server-npwm9" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.730611 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1faaded6-9ed5-4eef-9df7-bd5d8363ea14-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hmszm\" (UID: \"1faaded6-9ed5-4eef-9df7-bd5d8363ea14\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.731108 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" 
(UniqueName: \"kubernetes.io/secret/3f348c1f-2b11-4c89-a122-4b873c114126-certs\") pod \"machine-config-server-npwm9\" (UID: \"3f348c1f-2b11-4c89-a122-4b873c114126\") " pod="openshift-machine-config-operator/machine-config-server-npwm9" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.731608 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e001240c-c2f8-4102-9a25-8e4e16b1a07d-apiservice-cert\") pod \"packageserver-d55dfcdfc-zjh4r\" (UID: \"e001240c-c2f8-4102-9a25-8e4e16b1a07d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.731961 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a3a24f23-fa27-4ee5-9899-39aac4ec8dcd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-4m5cj\" (UID: \"a3a24f23-fa27-4ee5-9899-39aac4ec8dcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.732471 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/542d754d-bd15-40b7-8208-876f318413a9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hwj5f\" (UID: \"542d754d-bd15-40b7-8208-876f318413a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.732904 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/3ee0e3db-f2c3-4b37-a024-8759009ed1df-signing-key\") pod \"service-ca-9c57cc56f-tsm97\" (UID: \"3ee0e3db-f2c3-4b37-a024-8759009ed1df\") " pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.733541 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/bc578100-b929-4249-bd7e-de64d0469bb9-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-7h6m6\" (UID: \"bc578100-b929-4249-bd7e-de64d0469bb9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.734052 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctztn\" (UniqueName: \"kubernetes.io/projected/34f42b2b-8a4b-41da-89cc-cd4da7edafe0-kube-api-access-ctztn\") pod \"apiserver-7bbb656c7d-pb7hk\" (UID: \"34f42b2b-8a4b-41da-89cc-cd4da7edafe0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.734055 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d5157b08-9af0-4a76-a6a1-351020294cd0-metrics-tls\") pod \"ingress-operator-5b745b69d9-rjjgh\" (UID: \"d5157b08-9af0-4a76-a6a1-351020294cd0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.737417 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25870e3b-7737-4e6a-9ac7-a003d45c140b-secret-volume\") pod \"collect-profiles-29319405-6822s\" (UID: \"25870e3b-7737-4e6a-9ac7-a003d45c140b\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.738923 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmjql\" (UniqueName: \"kubernetes.io/projected/df3b90d4-ee10-47a5-a84f-5beb55894684-kube-api-access-kmjql\") pod \"csi-hostpathplugin-2nsh7\" (UID: \"df3b90d4-ee10-47a5-a84f-5beb55894684\") " pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.742057 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-8gn4w"] Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.743062 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-68w2v"] Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.746888 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/438aeabd-9678-4e33-8367-1008713f7438-profile-collector-cert\") pod \"catalog-operator-68c6474976-msxgb\" (UID: \"438aeabd-9678-4e33-8367-1008713f7438\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.748524 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: E0929 16:53:33.752527 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.252496701 +0000 UTC m=+144.400274382 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.754052 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn9hc\" (UniqueName: \"kubernetes.io/projected/a286ddd1-247b-4af4-b410-ad78a8d94595-kube-api-access-mn9hc\") pod \"kube-storage-version-migrator-operator-b67b599dd-99jhd\" (UID: \"a286ddd1-247b-4af4-b410-ad78a8d94595\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.756897 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flqvh\" (UniqueName: \"kubernetes.io/projected/542d754d-bd15-40b7-8208-876f318413a9-kube-api-access-flqvh\") pod \"marketplace-operator-79b997595-hwj5f\" (UID: \"542d754d-bd15-40b7-8208-876f318413a9\") " pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.760130 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsrcf\" (UniqueName: \"kubernetes.io/projected/3ee0e3db-f2c3-4b37-a024-8759009ed1df-kube-api-access-hsrcf\") pod \"service-ca-9c57cc56f-tsm97\" (UID: \"3ee0e3db-f2c3-4b37-a024-8759009ed1df\") " pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.767258 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7"] Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.769572 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq"] Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.784219 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc"] Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.793934 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgw5p\" (UniqueName: \"kubernetes.io/projected/642112d6-34bb-4f6a-aafc-2be7ca427dd7-kube-api-access-lgw5p\") pod \"migrator-59844c95c7-d4pxx\" (UID: \"642112d6-34bb-4f6a-aafc-2be7ca427dd7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d4pxx" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.795261 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" Sep 29 16:53:33 crc kubenswrapper[4592]: W0929 16:53:33.807356 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode031dd8a_d542_4dca_8bb7_12e36101c41e.slice/crio-e1b496fd46c3ed61f6c7217fdf123849fa493a1385fba9b5988ddc071734c474 WatchSource:0}: Error finding container e1b496fd46c3ed61f6c7217fdf123849fa493a1385fba9b5988ddc071734c474: Status 404 returned error can't find the container with id e1b496fd46c3ed61f6c7217fdf123849fa493a1385fba9b5988ddc071734c474 Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.808172 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjs8b\" (UniqueName: \"kubernetes.io/projected/438aeabd-9678-4e33-8367-1008713f7438-kube-api-access-hjs8b\") pod \"catalog-operator-68c6474976-msxgb\" (UID: \"438aeabd-9678-4e33-8367-1008713f7438\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.820747 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" Sep 29 16:53:33 crc kubenswrapper[4592]: W0929 16:53:33.826093 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81763dda_c34c_4bdf_a422_bbb5a76d8c95.slice/crio-bf10e5461917f2736f422f09ce56b44d73cab97d4d324e555e131ac3afa4a060 WatchSource:0}: Error finding container bf10e5461917f2736f422f09ce56b44d73cab97d4d324e555e131ac3afa4a060: Status 404 returned error can't find the container with id bf10e5461917f2736f422f09ce56b44d73cab97d4d324e555e131ac3afa4a060 Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.830193 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-npwm9" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.832660 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htb5d\" (UniqueName: \"kubernetes.io/projected/0bd227af-149e-4e98-bb4a-ce3fab42d945-kube-api-access-htb5d\") pod \"service-ca-operator-777779d784-d78kk\" (UID: \"0bd227af-149e-4e98-bb4a-ce3fab42d945\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.850809 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:33 crc kubenswrapper[4592]: E0929 16:53:33.851465 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.351415117 +0000 UTC m=+144.499192798 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.860648 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hlw8\" (UniqueName: \"kubernetes.io/projected/25870e3b-7737-4e6a-9ac7-a003d45c140b-kube-api-access-7hlw8\") pod \"collect-profiles-29319405-6822s\" (UID: \"25870e3b-7737-4e6a-9ac7-a003d45c140b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.862516 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fv7hf\" (UniqueName: \"kubernetes.io/projected/bc578100-b929-4249-bd7e-de64d0469bb9-kube-api-access-fv7hf\") pod \"package-server-manager-789f6589d5-7h6m6\" (UID: \"bc578100-b929-4249-bd7e-de64d0469bb9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.869546 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.871379 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.879521 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv5dp\" (UniqueName: \"kubernetes.io/projected/94d01a53-233d-4e33-83c5-64a6200807b0-kube-api-access-wv5dp\") pod \"dns-default-4sx67\" (UID: \"94d01a53-233d-4e33-83c5-64a6200807b0\") " pod="openshift-dns/dns-default-4sx67" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.897660 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9ttl\" (UniqueName: \"kubernetes.io/projected/df0fe5a9-e5dd-40b0-8d51-addb3b8c8865-kube-api-access-w9ttl\") pod \"multus-admission-controller-857f4d67dd-46ndj\" (UID: \"df0fe5a9-e5dd-40b0-8d51-addb3b8c8865\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-46ndj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.914388 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7" event={"ID":"d645a2f8-5d71-4d9e-9bfa-487388f618ca","Type":"ContainerStarted","Data":"8ef0bf9add5f78bfb4db3942a6bdfb7abf8123f88a2ce2c3f888b0a1bc90c0d5"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.917867 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc" event={"ID":"c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9","Type":"ContainerStarted","Data":"f2c075679cc43745110bb0ba9f62d4007b44e138f6561b19a66ff3c307c86767"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.924170 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw" 
event={"ID":"5a608b5e-6d79-4439-adcf-7f2549890bba","Type":"ContainerStarted","Data":"e310309a645aa7296d2e8de7673e206426bf7ed4c52704b5149b5e664c2a5615"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.926954 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" event={"ID":"107b2d55-7d06-4091-b57c-bcf7c3635060","Type":"ContainerStarted","Data":"6ece491d53068e5d5d7eb3de6cf88e5c361060df58914262d90baabf6137858f"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.930580 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k86jt\" (UniqueName: \"kubernetes.io/projected/d5157b08-9af0-4a76-a6a1-351020294cd0-kube-api-access-k86jt\") pod \"ingress-operator-5b745b69d9-rjjgh\" (UID: \"d5157b08-9af0-4a76-a6a1-351020294cd0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.931973 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8gn4w" event={"ID":"81763dda-c34c-4bdf-a422-bbb5a76d8c95","Type":"ContainerStarted","Data":"bf10e5461917f2736f422f09ce56b44d73cab97d4d324e555e131ac3afa4a060"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.939478 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a3a24f23-fa27-4ee5-9899-39aac4ec8dcd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-4m5cj\" (UID: \"a3a24f23-fa27-4ee5-9899-39aac4ec8dcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.939613 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v" event={"ID":"e031dd8a-d542-4dca-8bb7-12e36101c41e","Type":"ContainerStarted","Data":"e1b496fd46c3ed61f6c7217fdf123849fa493a1385fba9b5988ddc071734c474"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.953880 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-8q6sl"] Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.956600 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:33 crc kubenswrapper[4592]: E0929 16:53:33.957029 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.457015693 +0000 UTC m=+144.604793374 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.958623 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flgms\" (UniqueName: \"kubernetes.io/projected/efea8fdd-ab02-401a-b724-b66032ca838f-kube-api-access-flgms\") pod \"ingress-canary-jfwcc\" (UID: \"efea8fdd-ab02-401a-b724-b66032ca838f\") " pod="openshift-ingress-canary/ingress-canary-jfwcc" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.959377 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd" event={"ID":"57dcd71e-9bef-47f5-8512-d5eb7cd407b5","Type":"ContainerStarted","Data":"b4190ce2643d482f94044f5ab110a8802908dd4f98d8e720ab36315d9a3b0277"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.959412 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd" event={"ID":"57dcd71e-9bef-47f5-8512-d5eb7cd407b5","Type":"ContainerStarted","Data":"06bed57132ea63ad37fd5bd16bec4b5114b6fa78330badc2fef58004a5c31401"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.962944 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq" event={"ID":"440d74af-4040-42bf-83cd-e13fb8526d17","Type":"ContainerStarted","Data":"0023c1aa8bda97d526be35025375bc485ebed1842aea7df2bda1c34120e7a1c5"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.966986 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" event={"ID":"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e","Type":"ContainerStarted","Data":"b1284340d33e603a01ca17149a985c42c11f79673fac030ed168bc1f1165ee58"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.972832 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52" event={"ID":"ff85be0b-4fe9-43fa-941f-c00f69b7f459","Type":"ContainerStarted","Data":"e7bda58a696ea586fe5e6c974d5814cb76f90cbba63ffa1ab18ca46b91b06bff"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.974346 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-t4tpk" event={"ID":"b3554952-1f35-4ce9-9a10-1caa25c188fb","Type":"ContainerStarted","Data":"e7a5b147c7b5496fb47b74d98bd508a4e11523c3875254c759e51136cf271b9e"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.974374 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-t4tpk" event={"ID":"b3554952-1f35-4ce9-9a10-1caa25c188fb","Type":"ContainerStarted","Data":"f971e322599a582d0a7d5bdd0e65e3a652b20940cf32e9c6e261c9565e5ad485"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.989581 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.990043 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-46ndj" Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.989791 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k" event={"ID":"7b20af08-54c7-4d8b-b2c2-6189a31c76e5","Type":"ContainerStarted","Data":"b26674f39db7d2cd427802b70fa69bf3f99c5f6d2f502b5b0c879d023dadeeb5"} Sep 29 16:53:33 crc kubenswrapper[4592]: I0929 16:53:33.990860 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k" event={"ID":"7b20af08-54c7-4d8b-b2c2-6189a31c76e5","Type":"ContainerStarted","Data":"5b12d742cc4e7e9642771308c9d59df21e311c619ab5ed6a3c04e0e81696ae6c"} Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.008171 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.014418 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d4pxx" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.021849 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc" event={"ID":"6d7df253-52f7-4764-8a61-fb4e2a389634","Type":"ContainerStarted","Data":"5a7251c73f1df5e9e0738a85e1c2d2ae5bc74cbcad7a74f99e45afc33a3a89d4"} Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.022953 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.025443 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.025914 4592 patch_prober.go:28] interesting pod/console-operator-58897d9998-8pdqk container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/readyz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.025953 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-8pdqk" podUID="3aac1447-7fa7-4b9c-bc79-e194dba65129" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.7:8443/readyz\": dial tcp 10.217.0.7:8443: connect: connection refused" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.025970 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.025926 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgx9p\" (UniqueName: \"kubernetes.io/projected/1faaded6-9ed5-4eef-9df7-bd5d8363ea14-kube-api-access-lgx9p\") pod \"olm-operator-6b444d44fb-hmszm\" (UID: \"1faaded6-9ed5-4eef-9df7-bd5d8363ea14\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.025991 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.025967 4592 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-5l86w container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" start-of-body= Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.026021 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" podUID="e1aa3a51-f8e6-49a1-8013-74755f9c89b0" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.5:6443/healthz\": dial tcp 10.217.0.5:6443: connect: connection refused" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.028727 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d5157b08-9af0-4a76-a6a1-351020294cd0-bound-sa-token\") pod \"ingress-operator-5b745b69d9-rjjgh\" (UID: \"d5157b08-9af0-4a76-a6a1-351020294cd0\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.031578 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.038407 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.051360 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckqzf\" (UniqueName: \"kubernetes.io/projected/e001240c-c2f8-4102-9a25-8e4e16b1a07d-kube-api-access-ckqzf\") pod \"packageserver-d55dfcdfc-zjh4r\" (UID: \"e001240c-c2f8-4102-9a25-8e4e16b1a07d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.051515 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.052352 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.052399 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.056029 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-t4tpk" Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.058005 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.060376 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.560297869 +0000 UTC m=+144.708075620 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.060586 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s"
Sep 29 16:53:34 crc kubenswrapper[4592]: W0929 16:53:34.065033 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f348c1f_2b11_4c89_a122_4b873c114126.slice/crio-243eb23ed4b3d6132147fe61894a85b9de7aff46a5b6b3491bcd2b37878fd9ac WatchSource:0}: Error finding container 243eb23ed4b3d6132147fe61894a85b9de7aff46a5b6b3491bcd2b37878fd9ac: Status 404 returned error can't find the container with id 243eb23ed4b3d6132147fe61894a85b9de7aff46a5b6b3491bcd2b37878fd9ac
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.066262 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm"
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.074014 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4wgz\" (UniqueName: \"kubernetes.io/projected/7022d695-153c-4fe6-a030-618c0dd54768-kube-api-access-q4wgz\") pod \"machine-config-operator-74547568cd-nncw8\" (UID: \"7022d695-153c-4fe6-a030-618c0dd54768\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8"
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.074319 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r"
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.094030 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-jfwcc"
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.108764 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-4sx67"
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.159879 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.160508 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.660483244 +0000 UTC m=+144.808260925 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.263892 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd"]
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.291127 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.291277 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.791257957 +0000 UTC m=+144.939035638 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.291424 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.291767 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.791758172 +0000 UTC m=+144.939535853 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.292189 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh"
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.300198 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8"
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.388078 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-zn6hr"]
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.392128 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.392294 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.892272667 +0000 UTC m=+145.040050358 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.392341 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.392724 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.89271404 +0000 UTC m=+145.040491721 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.493587 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.493729 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.993704138 +0000 UTC m=+145.141481829 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.494120 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a89d7bfa-d740-4792-8fef-d71c8da7559e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-789qk\" (UID: \"a89d7bfa-d740-4792-8fef-d71c8da7559e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk"
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.494197 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.494448 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:34.99444126 +0000 UTC m=+145.142218941 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.503439 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a89d7bfa-d740-4792-8fef-d71c8da7559e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-789qk\" (UID: \"a89d7bfa-d740-4792-8fef-d71c8da7559e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk"
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.519570 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk"
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.566398 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk"]
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.595455 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.595761 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:35.095743329 +0000 UTC m=+145.243521020 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.627638 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-2nsh7"]
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.643454 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.698055 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.698371 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:35.198354465 +0000 UTC m=+145.346132146 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.799524 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.799624 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:35.299608731 +0000 UTC m=+145.447386412 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.799887 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.800186 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:35.300178039 +0000 UTC m=+145.447955720 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:34 crc kubenswrapper[4592]: W0929 16:53:34.814634 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda286ddd1_247b_4af4_b410_ad78a8d94595.slice/crio-9ccd279ab2ad743749e509191a49beef7b779e2dd3662fc868a69cfeeec33186 WatchSource:0}: Error finding container 9ccd279ab2ad743749e509191a49beef7b779e2dd3662fc868a69cfeeec33186: Status 404 returned error can't find the container with id 9ccd279ab2ad743749e509191a49beef7b779e2dd3662fc868a69cfeeec33186
Sep 29 16:53:34 crc kubenswrapper[4592]: W0929 16:53:34.818848 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode586a2e2_918f_40e6_b7eb_9e937dd20c32.slice/crio-310d361df25db221565ce0c13514fecb91633ca5b6a199b81390e80ef0e487e2 WatchSource:0}: Error finding container 310d361df25db221565ce0c13514fecb91633ca5b6a199b81390e80ef0e487e2: Status 404 returned error can't find the container with id 310d361df25db221565ce0c13514fecb91633ca5b6a199b81390e80ef0e487e2
Sep 29 16:53:34 crc kubenswrapper[4592]: I0929 16:53:34.901087 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:34 crc kubenswrapper[4592]: E0929 16:53:34.901714 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:35.401670582 +0000 UTC m=+145.549448253 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.002370 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:35 crc kubenswrapper[4592]: E0929 16:53:35.002706 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:35.502691082 +0000 UTC m=+145.650468803 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.024746 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" event={"ID":"62388c6a-1394-47b9-90ac-fbb9aa780729","Type":"ContainerStarted","Data":"77d5ec5499cde21c706a95699c1db2a78a8915c2d971723dbded6f2d9392a646"}
Sep 29 16:53:35 crc kubenswrapper[4592]: W0929 16:53:35.039339 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34f42b2b_8a4b_41da_89cc_cd4da7edafe0.slice/crio-7e4babf95cba47b5eeff281006a1a354bba8fc09f78947de44cc0f53e37b3ddb WatchSource:0}: Error finding container 7e4babf95cba47b5eeff281006a1a354bba8fc09f78947de44cc0f53e37b3ddb: Status 404 returned error can't find the container with id 7e4babf95cba47b5eeff281006a1a354bba8fc09f78947de44cc0f53e37b3ddb
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.039908 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-npwm9" event={"ID":"3f348c1f-2b11-4c89-a122-4b873c114126","Type":"ContainerStarted","Data":"243eb23ed4b3d6132147fe61894a85b9de7aff46a5b6b3491bcd2b37878fd9ac"}
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.041981 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52" event={"ID":"ff85be0b-4fe9-43fa-941f-c00f69b7f459","Type":"ContainerStarted","Data":"863237b1f72a0f7c483cc28e6619545395cd924b17fc6ff30b3324596ee35ef9"}
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.043452 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zn6hr" event={"ID":"e586a2e2-918f-40e6-b7eb-9e937dd20c32","Type":"ContainerStarted","Data":"310d361df25db221565ce0c13514fecb91633ca5b6a199b81390e80ef0e487e2"}
Sep 29 16:53:35 crc kubenswrapper[4592]: W0929 16:53:35.045395 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf3b90d4_ee10_47a5_a84f_5beb55894684.slice/crio-9e1180abd982de33f22114a811651a3b1bf7adf889bcacabe909f2ffac46bd90 WatchSource:0}: Error finding container 9e1180abd982de33f22114a811651a3b1bf7adf889bcacabe909f2ffac46bd90: Status 404 returned error can't find the container with id 9e1180abd982de33f22114a811651a3b1bf7adf889bcacabe909f2ffac46bd90
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.045426 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm" event={"ID":"3650f5fa-7a17-4b65-8b55-5bb528beba58","Type":"ContainerStarted","Data":"331d2da324a10e839daf67f036842ee1561ccdcd4d9dc67929f3b052348b4135"}
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.047985 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.048116 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.055717 4592 generic.go:334] "Generic (PLEG): container finished" podID="8bede80e-3f98-4ca7-be3d-20e7bc9ea19e" containerID="2b37905b775a545f0167275a7c29e80ea6dee2fe038b6130931e18741108dde3" exitCode=0
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.055885 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" event={"ID":"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e","Type":"ContainerDied","Data":"2b37905b775a545f0167275a7c29e80ea6dee2fe038b6130931e18741108dde3"}
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.062268 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" event={"ID":"a286ddd1-247b-4af4-b410-ad78a8d94595","Type":"ContainerStarted","Data":"9ccd279ab2ad743749e509191a49beef7b779e2dd3662fc868a69cfeeec33186"}
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.104164 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:35 crc kubenswrapper[4592]: E0929 16:53:35.104311 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:35.604288808 +0000 UTC m=+145.752066489 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.104373 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:35 crc kubenswrapper[4592]: E0929 16:53:35.104688 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:35.604679619 +0000 UTC m=+145.752457300 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.205843 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:35 crc kubenswrapper[4592]: E0929 16:53:35.206886 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:35.706859254 +0000 UTC m=+145.854636935 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.211980 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-8pdqk" podStartSLOduration=124.211964916 podStartE2EDuration="2m4.211964916s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:35.173246516 +0000 UTC m=+145.321024207" watchObservedRunningTime="2025-09-29 16:53:35.211964916 +0000 UTC m=+145.359742597"
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.251167 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vjq8k" podStartSLOduration=123.251134419 podStartE2EDuration="2m3.251134419s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:35.250329444 +0000 UTC m=+145.398107125" watchObservedRunningTime="2025-09-29 16:53:35.251134419 +0000 UTC m=+145.398912100"
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.294201 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-t4tpk" podStartSLOduration=123.294184057 podStartE2EDuration="2m3.294184057s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:35.29196011 +0000 UTC m=+145.439737811" watchObservedRunningTime="2025-09-29 16:53:35.294184057 +0000 UTC m=+145.441961738"
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.307968 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:35 crc kubenswrapper[4592]: E0929 16:53:35.308297 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:35.808286125 +0000 UTC m=+145.956063806 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.394682 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" podStartSLOduration=124.394622279 podStartE2EDuration="2m4.394622279s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:35.390930889 +0000 UTC m=+145.538708590" watchObservedRunningTime="2025-09-29 16:53:35.394622279 +0000 UTC m=+145.542399950"
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.409343 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:35 crc kubenswrapper[4592]: E0929 16:53:35.409699 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:35.909640585 +0000 UTC m=+146.057418276 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.410363 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:35 crc kubenswrapper[4592]: E0929 16:53:35.411124 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:35.911088018 +0000 UTC m=+146.058865699 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.492666 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-49g4p" podStartSLOduration=123.492634779 podStartE2EDuration="2m3.492634779s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:35.491706561 +0000 UTC m=+145.639484252" watchObservedRunningTime="2025-09-29 16:53:35.492634779 +0000 UTC m=+145.640412460"
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.512581 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:35 crc kubenswrapper[4592]: E0929 16:53:35.513005 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:36.012987173 +0000 UTC m=+146.160764854 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.537231 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-m4z52" podStartSLOduration=123.537209993 podStartE2EDuration="2m3.537209993s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:35.532031509 +0000 UTC m=+145.679809190" watchObservedRunningTime="2025-09-29 16:53:35.537209993 +0000 UTC m=+145.684987674"
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.624812 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:35 crc kubenswrapper[4592]: E0929 16:53:35.625233 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:36.125132393 +0000 UTC m=+146.272910074 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.725892 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:35 crc kubenswrapper[4592]: E0929 16:53:35.726220 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:36.226205644 +0000 UTC m=+146.373983325 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.731939 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-smrv8" podStartSLOduration=123.731926074 podStartE2EDuration="2m3.731926074s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:35.696582745 +0000 UTC m=+145.844360426" watchObservedRunningTime="2025-09-29 16:53:35.731926074 +0000 UTC m=+145.879703755"
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.806721 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-jfwcc"]
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.822681 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-d4pxx"]
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.834979 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w"
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.835956 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:35 crc kubenswrapper[4592]: E0929 16:53:35.836412 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:36.336399116 +0000 UTC m=+146.484176797 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.866681 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc" podStartSLOduration=124.866655304 podStartE2EDuration="2m4.866655304s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:35.865399277 +0000 UTC m=+146.013176958" watchObservedRunningTime="2025-09-29 16:53:35.866655304 +0000 UTC m=+146.014432985"
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.937842 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:35 crc kubenswrapper[4592]: E0929 16:53:35.938461 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:36.438440396 +0000 UTC m=+146.586218077 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:35 crc kubenswrapper[4592]: I0929 16:53:35.981376 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" podStartSLOduration=123.98136093 podStartE2EDuration="2m3.98136093s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:35.948791354 +0000 UTC m=+146.096569035" watchObservedRunningTime="2025-09-29 16:53:35.98136093 +0000 UTC m=+146.129138611"
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.039366 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:36 crc kubenswrapper[4592]: E0929 16:53:36.039643 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:36.539632061 +0000 UTC m=+146.687409742 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.049318 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.049381 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.101957 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7" event={"ID":"d645a2f8-5d71-4d9e-9bfa-487388f618ca","Type":"ContainerStarted","Data":"962f4596e7bb746fde9219414cb8fc7d9b547d1a1f8496cd6481e1540994ec43"}
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.108497 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" event={"ID":"df3b90d4-ee10-47a5-a84f-5beb55894684","Type":"ContainerStarted","Data":"9e1180abd982de33f22114a811651a3b1bf7adf889bcacabe909f2ffac46bd90"}
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.114063 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" event={"ID":"34f42b2b-8a4b-41da-89cc-cd4da7edafe0","Type":"ContainerStarted","Data":"7e4babf95cba47b5eeff281006a1a354bba8fc09f78947de44cc0f53e37b3ddb"}
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.115884 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" event={"ID":"107b2d55-7d06-4091-b57c-bcf7c3635060","Type":"ContainerStarted","Data":"d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78"}
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.117435 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq" event={"ID":"440d74af-4040-42bf-83cd-e13fb8526d17","Type":"ContainerStarted","Data":"ede8a13356ad1feae6fa8ff2d2d6de230134fe5843554eddc77baa6c9916d212"}
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.141588 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:36 crc kubenswrapper[4592]: E0929 16:53:36.141906 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:36.641888626 +0000 UTC m=+146.789666307 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.227666 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-9dbw7" podStartSLOduration=124.227650043 podStartE2EDuration="2m4.227650043s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:36.192740917 +0000 UTC m=+146.340518608" watchObservedRunningTime="2025-09-29 16:53:36.227650043 +0000 UTC m=+146.375427724"
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.232408 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.244452 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:36 crc kubenswrapper[4592]: E0929 16:53:36.246481 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:36.746469712 +0000 UTC m=+146.894247393 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.257908 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-d78kk"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.349280 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:36 crc kubenswrapper[4592]: E0929 16:53:36.350031 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:36.850000476 +0000 UTC m=+146.997778157 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.450840 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:36 crc kubenswrapper[4592]: E0929 16:53:36.451117 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:36.951101038 +0000 UTC m=+147.098878719 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.453438 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hwj5f"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.469359 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-46ndj"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.548010 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.557536 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:36 crc kubenswrapper[4592]: E0929 16:53:36.557685 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:37.057642732 +0000 UTC m=+147.205420413 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.559004 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:36 crc kubenswrapper[4592]: E0929 16:53:36.559517 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:37.059482507 +0000 UTC m=+147.207260188 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:36 crc kubenswrapper[4592]: W0929 16:53:36.589413 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf0fe5a9_e5dd_40b0_8d51_addb3b8c8865.slice/crio-ea23289c513f465e26cf3e88d6ad9d54ea211ea780228f7532b8f4011a206aaf WatchSource:0}: Error finding container ea23289c513f465e26cf3e88d6ad9d54ea211ea780228f7532b8f4011a206aaf: Status 404 returned error can't find the container with id ea23289c513f465e26cf3e88d6ad9d54ea211ea780228f7532b8f4011a206aaf
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.660289 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:36 crc kubenswrapper[4592]: E0929 16:53:36.660565 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:37.160551927 +0000 UTC m=+147.308329608 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.672860 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.710454 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.748323 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.755031 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.766833 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:36 crc kubenswrapper[4592]: E0929 16:53:36.767291 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:37.267276916 +0000 UTC m=+147.415054597 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.771450 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-tsm97"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.867744 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:36 crc kubenswrapper[4592]: E0929 16:53:36.870572 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:37.370533252 +0000 UTC m=+147.518310933 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.872257 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:36 crc kubenswrapper[4592]: E0929 16:53:36.872770 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:37.372757808 +0000 UTC m=+147.520535479 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.881860 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.890565 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.944486 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-4sx67"]
Sep 29 16:53:36 crc kubenswrapper[4592]: I0929 16:53:36.972710 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:36 crc kubenswrapper[4592]: E0929 16:53:36.973018 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:37.473001514 +0000 UTC m=+147.620779195 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.053566 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 16:53:37 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld
Sep 29 16:53:37 crc kubenswrapper[4592]: [+]process-running ok
Sep 29 16:53:37 crc kubenswrapper[4592]: healthz check failed
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.056727 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.074759 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:37 crc kubenswrapper[4592]: E0929 16:53:37.075131 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:37.575114197 +0000 UTC m=+147.722891878 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.174318 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd" event={"ID":"57dcd71e-9bef-47f5-8512-d5eb7cd407b5","Type":"ContainerStarted","Data":"6a16b87e6300378ff0e323a3c522c73beddf3dec0e9162ea86d65e6e2a12f79b"} Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.176525 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:37 crc kubenswrapper[4592]: E0929 16:53:37.177058 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:37.676979051 +0000 UTC m=+147.824756732 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.193479 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" event={"ID":"1faaded6-9ed5-4eef-9df7-bd5d8363ea14","Type":"ContainerStarted","Data":"af1199f140f4a0f2ecf765027659a13a0eb174b2b224dba1861b490a7ca67c02"} Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.207283 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8"] Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.209281 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ggwzd" podStartSLOduration=126.20926639 podStartE2EDuration="2m6.20926639s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:37.204957752 +0000 UTC m=+147.352735433" watchObservedRunningTime="2025-09-29 16:53:37.20926639 +0000 UTC m=+147.357044071" Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.211583 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-4sx67" event={"ID":"94d01a53-233d-4e33-83c5-64a6200807b0","Type":"ContainerStarted","Data":"273a41bb56d617a0279476bbeddb5d578ea5abe087bb6d0660b97d09ecb7c903"} Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 
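The repeated "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" failures above mean the kubelet's in-memory CSI plugin registry has no entry for that driver yet (its node plugin has not completed registration over the kubelet's plugin-registration socket), so every MountDevice and TearDownAt attempt fails until it does. A minimal diagnostic sketch in Go, assuming client-go is available and a kubeconfig path (the path here is hypothetical); this lists the CSIDriver objects the API server advertises, which is related to but distinct from the node-local registration the kubelet checks:

// csidrivers.go - list CSIDriver objects; a sketch, not the kubelet's own check.
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Hypothetical kubeconfig location; adjust for the environment.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	drivers, err := cs.StorageV1().CSIDrivers().List(context.Background(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	for _, d := range drivers.Items {
		// Expect kubevirt.io.hostpath-provisioner to appear once its node plugin registers.
		fmt.Println(d.Name)
	}
}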
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.225333 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" event={"ID":"a3a24f23-fa27-4ee5-9899-39aac4ec8dcd","Type":"ContainerStarted","Data":"37a61f7caa43db3020cd4d5846ffbddd1adc5b307ef154d2c43f6b1d2d690030"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.231942 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" event={"ID":"e001240c-c2f8-4102-9a25-8e4e16b1a07d","Type":"ContainerStarted","Data":"d5c0072d89f8bcb791b420e3d2d09989304eeb112e6b6b34104dd72f0d546199"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.236869 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" event={"ID":"a286ddd1-247b-4af4-b410-ad78a8d94595","Type":"ContainerStarted","Data":"be5294545bbc2c03e5376eb7982abf157ad10504161ecd5b94ab9f7c8300655a"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.245439 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" event={"ID":"3ee0e3db-f2c3-4b37-a024-8759009ed1df","Type":"ContainerStarted","Data":"e0556078e62470163b5c7d477254fe50fdb20dd485a1ca00741d5a93921f905b"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.246810 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-46ndj" event={"ID":"df0fe5a9-e5dd-40b0-8d51-addb3b8c8865","Type":"ContainerStarted","Data":"ea23289c513f465e26cf3e88d6ad9d54ea211ea780228f7532b8f4011a206aaf"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.278097 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:37 crc kubenswrapper[4592]: E0929 16:53:37.279162 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:37.779126224 +0000 UTC m=+147.926903915 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.280695 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8gn4w" event={"ID":"81763dda-c34c-4bdf-a422-bbb5a76d8c95","Type":"ContainerStarted","Data":"eedb165b6064c38539d9a2d1402921c9d9a5f389f4ad310619cea5bcaeb2acc7"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.293001 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" event={"ID":"542d754d-bd15-40b7-8208-876f318413a9","Type":"ContainerStarted","Data":"6a0fb8fb40764ac1c80a5d1ef803e7b918a1ded96027232a3391ac6a398a1b69"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.312540 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" event={"ID":"0bd227af-149e-4e98-bb4a-ce3fab42d945","Type":"ContainerStarted","Data":"ed47a144cd9c09c45605dc03147e64c85cb3a714d81491ca5322cc856f2e7743"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.326973 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" event={"ID":"bc578100-b929-4249-bd7e-de64d0469bb9","Type":"ContainerStarted","Data":"4f957d25fc4553bf53ba07794ebb258c039f4952118c897804b9605bd228af18"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.331207 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" event={"ID":"438aeabd-9678-4e33-8367-1008713f7438","Type":"ContainerStarted","Data":"195810d21ac7cc3acc01b279e64f68c66f94613b820ce56c69cffba58e9780ca"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.336757 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw" event={"ID":"5a608b5e-6d79-4439-adcf-7f2549890bba","Type":"ContainerStarted","Data":"f3a75dd1b44e7312e40c8455fb0dfe0897bb7a74f1b9c0db21749a19b716f37c"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.339054 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zn6hr" event={"ID":"e586a2e2-918f-40e6-b7eb-9e937dd20c32","Type":"ContainerStarted","Data":"73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.341670 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc" event={"ID":"c09a496e-48c6-43f8-a08f-5ec0bfe6f3e9","Type":"ContainerStarted","Data":"0e1140f54522534b3786ef6dc3250932212655d5cc4ca355ae34210dbc27c4e9"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.350893 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d4pxx" event={"ID":"642112d6-34bb-4f6a-aafc-2be7ca427dd7","Type":"ContainerStarted","Data":"828d08094290c6399253bc2a56caf598d3e4ca4b36b24f707f7cc5b4ea7888f5"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.350941 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d4pxx" event={"ID":"642112d6-34bb-4f6a-aafc-2be7ca427dd7","Type":"ContainerStarted","Data":"85f1005260fd727e5f6293dd2fa4d76ea5d6821d279742d6bdcc248adda618d9"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.353490 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-npwm9" event={"ID":"3f348c1f-2b11-4c89-a122-4b873c114126","Type":"ContainerStarted","Data":"e0c2aabe679e541e658eb427b03872c15358f0341efef4e53a33f3d7c6bf71bc"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.360928 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-jfwcc" event={"ID":"efea8fdd-ab02-401a-b724-b66032ca838f","Type":"ContainerStarted","Data":"5bc009f03a85034276a971df9c64be612bec7f811798c2d1c909889c161c3c11"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.360980 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-jfwcc" event={"ID":"efea8fdd-ab02-401a-b724-b66032ca838f","Type":"ContainerStarted","Data":"d05388604d95f46dedd45ebb6fde9b95e385f9b794e1d27936016e029628da0c"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.361928 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-99jhd" podStartSLOduration=125.361914442 podStartE2EDuration="2m5.361914442s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:37.260412619 +0000 UTC m=+147.408190320" watchObservedRunningTime="2025-09-29 16:53:37.361914442 +0000 UTC m=+147.509692123"
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.362721 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-zn6hr" podStartSLOduration=125.362713116 podStartE2EDuration="2m5.362713116s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:37.355930515 +0000 UTC m=+147.503708196" watchObservedRunningTime="2025-09-29 16:53:37.362713116 +0000 UTC m=+147.510490797"
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.371600 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-lvzxc" podStartSLOduration=125.371581039 podStartE2EDuration="2m5.371581039s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:37.370856518 +0000 UTC m=+147.518634199" watchObservedRunningTime="2025-09-29 16:53:37.371581039 +0000 UTC m=+147.519358720"
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.379590 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:37 crc kubenswrapper[4592]: E0929 16:53:37.380827 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:37.880812014 +0000 UTC m=+148.028589695 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.384615 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" event={"ID":"25870e3b-7737-4e6a-9ac7-a003d45c140b","Type":"ContainerStarted","Data":"3da83bafaff9c27e5ebffc69c904121631cc9f2956fa27d5bb6d9f5388b089c6"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.387652 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk" event={"ID":"a89d7bfa-d740-4792-8fef-d71c8da7559e","Type":"ContainerStarted","Data":"54efc252620fb70a73c90df458fb0a78db61d23ccc6b36c0c5dd71d00295318d"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.399660 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm" event={"ID":"3650f5fa-7a17-4b65-8b55-5bb528beba58","Type":"ContainerStarted","Data":"f5aaf6ea3d3e52c2896b4bee8667dc01c3d3662677b1e01bb080ae7084eda907"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.401090 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-npwm9" podStartSLOduration=6.401073025 podStartE2EDuration="6.401073025s" podCreationTimestamp="2025-09-29 16:53:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:37.400384565 +0000 UTC m=+147.548162286" watchObservedRunningTime="2025-09-29 16:53:37.401073025 +0000 UTC m=+147.548850706"
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.414214 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" event={"ID":"d5157b08-9af0-4a76-a6a1-351020294cd0","Type":"ContainerStarted","Data":"68f52b9494e5c5bf3d929f8d53fcbd424ee93cb352a1fe7835e28556d21fb422"}
Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.424693 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-jfwcc" podStartSLOduration=7.424669885 podStartE2EDuration="7.424669885s" podCreationTimestamp="2025-09-29 16:53:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:37.424657955 +0000 UTC m=+147.572435646" watchObservedRunningTime="2025-09-29 16:53:37.424669885 +0000 UTC m=+147.572447576"
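Each failed volume operation above is parked by nestedpendingoperations with "No retries permitted until ..." half a second in the future, and the volume manager retries on that schedule until the driver finally registers; upstream kubelet code uses an exponential backoff for this, though the exact parameters below are illustrative assumptions, not the kubelet's values. A small sketch of the same retry pattern using apimachinery's wait package:

// backoff.go - retry a failing operation on an exponential schedule, mirroring
// the durationBeforeRetry behaviour in the log. Parameters are assumptions.
package main

import (
	"errors"
	"fmt"
	"time"

	"k8s.io/apimachinery/pkg/util/wait"
)

// mountDevice stands in for the CSI MountDevice call that keeps failing
// until the driver is registered; here it always fails for illustration.
func mountDevice() error {
	return errors.New("driver name kubevirt.io.hostpath-provisioner not found")
}

func main() {
	backoff := wait.Backoff{
		Duration: 500 * time.Millisecond, // initial delay, matching the 500ms seen above
		Factor:   2.0,                    // double the delay after each failure (assumed)
		Steps:    8,                      // give up after 8 attempts (assumed)
	}
	attempt := 0
	err := wait.ExponentialBackoff(backoff, func() (bool, error) {
		attempt++
		fmt.Printf("attempt %d at %s\n", attempt, time.Now().Format(time.RFC3339Nano))
		return mountDevice() == nil, nil // done once the mount succeeds
	})
	if err != nil {
		fmt.Println("gave up:", err)
	}
}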
pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v" event={"ID":"e031dd8a-d542-4dca-8bb7-12e36101c41e","Type":"ContainerStarted","Data":"dd816be02905be08ac4b95c3d91f88a08c05c6b636abda794210abdca450948d"} Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.460211 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.476104 4592 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-g2gnz container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.476177 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" podUID="107b2d55-7d06-4091-b57c-bcf7c3635060" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.490570 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:37 crc kubenswrapper[4592]: E0929 16:53:37.491001 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:37.990988275 +0000 UTC m=+148.138765956 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.504408 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-68w2v" podStartSLOduration=126.504388923 podStartE2EDuration="2m6.504388923s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:37.502343232 +0000 UTC m=+147.650120913" watchObservedRunningTime="2025-09-29 16:53:37.504388923 +0000 UTC m=+147.652166604" Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.544069 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" podStartSLOduration=125.54404991 podStartE2EDuration="2m5.54404991s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:37.54300785 +0000 UTC m=+147.690785541" watchObservedRunningTime="2025-09-29 16:53:37.54404991 +0000 UTC m=+147.691827591" Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.576250 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-vvfmq" podStartSLOduration=125.576235536 podStartE2EDuration="2m5.576235536s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:37.575669159 +0000 UTC m=+147.723446840" watchObservedRunningTime="2025-09-29 16:53:37.576235536 +0000 UTC m=+147.724013217" Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.593353 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:37 crc kubenswrapper[4592]: E0929 16:53:37.595531 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:38.095516308 +0000 UTC m=+148.243293989 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.695980 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:37 crc kubenswrapper[4592]: E0929 16:53:37.703403 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:38.203374901 +0000 UTC m=+148.351152582 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.805167 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:37 crc kubenswrapper[4592]: E0929 16:53:37.805513 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:38.305498603 +0000 UTC m=+148.453276284 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:37 crc kubenswrapper[4592]: I0929 16:53:37.906783 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:37 crc kubenswrapper[4592]: E0929 16:53:37.907222 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:38.407207154 +0000 UTC m=+148.554984835 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.007693 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:38 crc kubenswrapper[4592]: E0929 16:53:38.007833 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:38.507812491 +0000 UTC m=+148.655590192 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.008019 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:38 crc kubenswrapper[4592]: E0929 16:53:38.008372 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:38.508362787 +0000 UTC m=+148.656140468 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.049984 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 16:53:38 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld Sep 29 16:53:38 crc kubenswrapper[4592]: [+]process-running ok Sep 29 16:53:38 crc kubenswrapper[4592]: healthz check failed Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.050080 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.108474 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:38 crc kubenswrapper[4592]: E0929 16:53:38.108875 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:38.608859781 +0000 UTC m=+148.756637462 (durationBeforeRetry 500ms). 
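The router's startup probe output above shows the healthz sub-check format: each "[-]" line is a failing check, "[+]" a passing one, and any failure makes the endpoint return HTTP 500, which the prober records as a probe failure. A minimal sketch of the prober side, assuming a plain HTTP GET against a healthz endpoint; the URL below is illustrative, not the router's actual address:

// probe.go - minimal HTTP probe: any non-2xx healthz response counts as a failure,
// mirroring how the 500 above is reported. URL is an illustrative assumption.
package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

func probe(url string) error {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		// Covers errors like "connect: connection refused" seen elsewhere in this log.
		return fmt.Errorf("probe errored: %w", err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 4096)) // keep only the start-of-body
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return fmt.Errorf("HTTP probe failed with statuscode: %d, start-of-body: %s",
			resp.StatusCode, body)
	}
	return nil
}

func main() {
	if err := probe("http://127.0.0.1:1936/healthz/ready"); err != nil { // illustrative endpoint
		fmt.Println("Probe failed:", err)
	}
}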
Sep 29 16:53:38 crc kubenswrapper[4592]: E0929 16:53:38.108875 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:38.608859781 +0000 UTC m=+148.756637462 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.213555 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:38 crc kubenswrapper[4592]: E0929 16:53:38.214541 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:38.714527748 +0000 UTC m=+148.862305429 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.227308 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8qfjc"
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.317192 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:38 crc kubenswrapper[4592]: E0929 16:53:38.318188 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:38.818174065 +0000 UTC m=+148.965951746 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.423840 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:38 crc kubenswrapper[4592]: E0929 16:53:38.424287 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:38.924270816 +0000 UTC m=+149.072048497 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.473708 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" event={"ID":"62388c6a-1394-47b9-90ac-fbb9aa780729","Type":"ContainerStarted","Data":"66e008052f887b0d459dde8a36bcf10b18a27ffbf676db5419e24a22e9e99c5a"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.478982 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" event={"ID":"438aeabd-9678-4e33-8367-1008713f7438","Type":"ContainerStarted","Data":"6c4d06eedd336af36d10b2284e6d737b341898b0bce9f2dce60978babcb977f0"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.480766 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb"
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.480929 4592 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-msxgb container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body=
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.481016 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" podUID="438aeabd-9678-4e33-8367-1008713f7438" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused"
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.484991 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk" event={"ID":"a89d7bfa-d740-4792-8fef-d71c8da7559e","Type":"ContainerStarted","Data":"d77028fb328e8c6be68e400922b4cf37b9c558f5908ccc2399b9230491367f13"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.494529 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw" event={"ID":"5a608b5e-6d79-4439-adcf-7f2549890bba","Type":"ContainerStarted","Data":"e9b3c8d9d6c63778e2088b59a93b63264a14c130439b74b111881702618c2e4b"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.499820 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm" event={"ID":"3650f5fa-7a17-4b65-8b55-5bb528beba58","Type":"ContainerStarted","Data":"4609b4c685007770ee6bee4f771d4c359e6a20d9785bc52ebc00c14614c350a1"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.505439 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-4sx67" event={"ID":"94d01a53-233d-4e33-83c5-64a6200807b0","Type":"ContainerStarted","Data":"a46ba54e0f688970d42ad9c5c418d77a44b7a137194f7c56150953492b89c8c1"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.509908 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" event={"ID":"d5157b08-9af0-4a76-a6a1-351020294cd0","Type":"ContainerStarted","Data":"efc36894974474a574d56b8193cc3fbc6efcd350deb5b1d80f6478488ca5686c"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.510137 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" event={"ID":"d5157b08-9af0-4a76-a6a1-351020294cd0","Type":"ContainerStarted","Data":"0b3bc239991ddb1cbafc215d34319244c7745432662b4230688544aa6d3471c6"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.513515 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" event={"ID":"e001240c-c2f8-4102-9a25-8e4e16b1a07d","Type":"ContainerStarted","Data":"80826918d8fa5c8aca66144f2054bf93566f112ba8a97927a7faa26a1e671719"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.513997 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r"
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.515895 4592 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-zjh4r container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:5443/healthz\": dial tcp 10.217.0.32:5443: connect: connection refused" start-of-body=
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.516070 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" podUID="e001240c-c2f8-4102-9a25-8e4e16b1a07d" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.32:5443/healthz\": dial tcp 10.217.0.32:5443: connect: connection refused"
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.516924 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" event={"ID":"25870e3b-7737-4e6a-9ac7-a003d45c140b","Type":"ContainerStarted","Data":"9b339c2682c6ca96f15ab10e4b54b6d296ad154a3bfe8f9f0c1b4c151d08d556"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.522332 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d4pxx" event={"ID":"642112d6-34bb-4f6a-aafc-2be7ca427dd7","Type":"ContainerStarted","Data":"4ffe39169b8870945308d27f752d675af0c2825470d685fbae19c8a71b112117"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.524535 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:38 crc kubenswrapper[4592]: E0929 16:53:38.524841 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.024815611 +0000 UTC m=+149.172593292 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.534667 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" event={"ID":"3ee0e3db-f2c3-4b37-a024-8759009ed1df","Type":"ContainerStarted","Data":"07fc930c0be4f58274bce351f2af386e5278dac21f0d7daed735db62b1c041ea"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.538488 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-8q6sl" podStartSLOduration=126.538468447 podStartE2EDuration="2m6.538468447s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.508920409 +0000 UTC m=+148.656698100" watchObservedRunningTime="2025-09-29 16:53:38.538468447 +0000 UTC m=+148.686246128"
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.549290 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" event={"ID":"7022d695-153c-4fe6-a030-618c0dd54768","Type":"ContainerStarted","Data":"be8fba7d6411c87f27fd8c64df898fd16eff4c7186d4fdaa9d9c9b4e7b64703c"}
Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.549515 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" event={"ID":"7022d695-153c-4fe6-a030-618c0dd54768","Type":"ContainerStarted","Data":"84571bb9f57c8f0b824a36d4e2b7586e3093f977d5641db6535edee3c3f8c329"}
event={"ID":"7022d695-153c-4fe6-a030-618c0dd54768","Type":"ContainerStarted","Data":"6a95f44e45989fdaa5d634e57a4890e92201fa8be3d66fbec8663385dab3605d"} Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.559892 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" event={"ID":"0bd227af-149e-4e98-bb4a-ce3fab42d945","Type":"ContainerStarted","Data":"d8956a6e189416ec5ff1ad2864c4d309140b3043f6c2f16c9faf0515c0dfde7c"} Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.561864 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mczlm" podStartSLOduration=127.561848771 podStartE2EDuration="2m7.561848771s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.541436835 +0000 UTC m=+148.689214516" watchObservedRunningTime="2025-09-29 16:53:38.561848771 +0000 UTC m=+148.709626462" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.569261 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" event={"ID":"a3a24f23-fa27-4ee5-9899-39aac4ec8dcd","Type":"ContainerStarted","Data":"b834fc2a1fbf41c5c389797bb47650b0f24cf7d1917e7fffe041aa47bc5638a3"} Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.582625 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-46ndj" event={"ID":"df0fe5a9-e5dd-40b0-8d51-addb3b8c8865","Type":"ContainerStarted","Data":"7fa72d8cbc80dde3af64243d9bc41c157dd88f6349709cf2385a20d526acdaf8"} Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.586657 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-8gn4w" event={"ID":"81763dda-c34c-4bdf-a422-bbb5a76d8c95","Type":"ContainerStarted","Data":"93e8db35c3ec346a6785064d7a59895c74377a3e88fb72a74b9a9dcc35e3cad1"} Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.590512 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" podStartSLOduration=126.590493802 podStartE2EDuration="2m6.590493802s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.566378916 +0000 UTC m=+148.714156597" watchObservedRunningTime="2025-09-29 16:53:38.590493802 +0000 UTC m=+148.738271473" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.595760 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" event={"ID":"1faaded6-9ed5-4eef-9df7-bd5d8363ea14","Type":"ContainerStarted","Data":"6f8d57e1cbc8bb4743b78303a4e965cfa17de31d3bf2d52aece31a649d7208e5"} Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.596364 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.601332 4592 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-hmszm container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get 
\"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.601395 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" podUID="1faaded6-9ed5-4eef-9df7-bd5d8363ea14" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.608814 4592 generic.go:334] "Generic (PLEG): container finished" podID="34f42b2b-8a4b-41da-89cc-cd4da7edafe0" containerID="bee40c2f42965bbb187e8548f675e9ecfcd012a543e7a8e77465f4ffd568052c" exitCode=0 Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.608891 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" event={"ID":"34f42b2b-8a4b-41da-89cc-cd4da7edafe0","Type":"ContainerDied","Data":"bee40c2f42965bbb187e8548f675e9ecfcd012a543e7a8e77465f4ffd568052c"} Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.627959 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-48whw" podStartSLOduration=126.627944924 podStartE2EDuration="2m6.627944924s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.627195242 +0000 UTC m=+148.774972923" watchObservedRunningTime="2025-09-29 16:53:38.627944924 +0000 UTC m=+148.775722605" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.628264 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.629193 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-789qk" podStartSLOduration=127.62918537 podStartE2EDuration="2m7.62918537s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.59549897 +0000 UTC m=+148.743276651" watchObservedRunningTime="2025-09-29 16:53:38.62918537 +0000 UTC m=+148.776963051" Sep 29 16:53:38 crc kubenswrapper[4592]: E0929 16:53:38.630503 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.130487539 +0000 UTC m=+149.278265220 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.649584 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" event={"ID":"bc578100-b929-4249-bd7e-de64d0469bb9","Type":"ContainerStarted","Data":"f363c4f5ebe8cfb9ba8f7a1a96f54566c73bac77a4c3c8f7de5fc7867e3f909b"} Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.649814 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" event={"ID":"bc578100-b929-4249-bd7e-de64d0469bb9","Type":"ContainerStarted","Data":"4688182eb4e124701fa06e2935fb80291baff3823006051a9fb4b4eb16b47184"} Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.650226 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.658372 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-8gn4w" podStartSLOduration=126.658354427 podStartE2EDuration="2m6.658354427s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.658063818 +0000 UTC m=+148.805841499" watchObservedRunningTime="2025-09-29 16:53:38.658354427 +0000 UTC m=+148.806132108" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.679340 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" event={"ID":"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e","Type":"ContainerStarted","Data":"e48e727c7189006315b19ee8140f397461331dca6ed2fb5b42b8a6fba6c42b53"} Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.679383 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" event={"ID":"8bede80e-3f98-4ca7-be3d-20e7bc9ea19e","Type":"ContainerStarted","Data":"092eb1eb841e42cbb8f1f3002b41a7552ee6fe19dc25f57b0dfa3dec0a7c8fd5"} Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.697076 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" event={"ID":"542d754d-bd15-40b7-8208-876f318413a9","Type":"ContainerStarted","Data":"7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5"} Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.697431 4592 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-g2gnz container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.697457 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" podUID="107b2d55-7d06-4091-b57c-bcf7c3635060" 
containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.16:8443/healthz\": dial tcp 10.217.0.16:8443: connect: connection refused" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.698005 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.707111 4592 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hwj5f container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.707355 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" podUID="542d754d-bd15-40b7-8208-876f318413a9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.707419 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" podStartSLOduration=126.707399842 podStartE2EDuration="2m6.707399842s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.684571795 +0000 UTC m=+148.832349486" watchObservedRunningTime="2025-09-29 16:53:38.707399842 +0000 UTC m=+148.855177523" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.708873 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-d4pxx" podStartSLOduration=126.708860706 podStartE2EDuration="2m6.708860706s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.707759693 +0000 UTC m=+148.855537374" watchObservedRunningTime="2025-09-29 16:53:38.708860706 +0000 UTC m=+148.856638387" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.729479 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:38 crc kubenswrapper[4592]: E0929 16:53:38.730802 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.230788288 +0000 UTC m=+149.378565959 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.760424 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" podStartSLOduration=126.760408557 podStartE2EDuration="2m6.760408557s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.759140729 +0000 UTC m=+148.906930841" watchObservedRunningTime="2025-09-29 16:53:38.760408557 +0000 UTC m=+148.908186238" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.833833 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:38 crc kubenswrapper[4592]: E0929 16:53:38.834163 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.334130216 +0000 UTC m=+149.481907897 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.836090 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-4m5cj" podStartSLOduration=126.836070473 podStartE2EDuration="2m6.836070473s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.835555528 +0000 UTC m=+148.983333209" watchObservedRunningTime="2025-09-29 16:53:38.836070473 +0000 UTC m=+148.983848154" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.896482 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-d78kk" podStartSLOduration=126.896463347 podStartE2EDuration="2m6.896463347s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.894267221 +0000 UTC m=+149.042044912" watchObservedRunningTime="2025-09-29 16:53:38.896463347 +0000 UTC m=+149.044241028" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.930738 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-tsm97" podStartSLOduration=126.930723414 podStartE2EDuration="2m6.930723414s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.929125756 +0000 UTC m=+149.076939638" watchObservedRunningTime="2025-09-29 16:53:38.930723414 +0000 UTC m=+149.078501095" Sep 29 16:53:38 crc kubenswrapper[4592]: I0929 16:53:38.934756 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:38 crc kubenswrapper[4592]: E0929 16:53:38.935164 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.435128835 +0000 UTC m=+149.582906526 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.001170 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" podStartSLOduration=127.001131284 podStartE2EDuration="2m7.001131284s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.963853188 +0000 UTC m=+149.111630889" watchObservedRunningTime="2025-09-29 16:53:39.001131284 +0000 UTC m=+149.148908965" Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.036265 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.036727 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.536715241 +0000 UTC m=+149.684492922 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.043193 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nncw8" podStartSLOduration=127.043180833 podStartE2EDuration="2m7.043180833s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:39.0410849 +0000 UTC m=+149.188862581" watchObservedRunningTime="2025-09-29 16:53:39.043180833 +0000 UTC m=+149.190958514"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.043653 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-rjjgh" podStartSLOduration=127.043649437 podStartE2EDuration="2m7.043649437s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:38.999108964 +0000 UTC m=+149.146886645" watchObservedRunningTime="2025-09-29 16:53:39.043649437 +0000 UTC m=+149.191427118"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.066342 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 16:53:39 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld
Sep 29 16:53:39 crc kubenswrapper[4592]: [+]process-running ok
Sep 29 16:53:39 crc kubenswrapper[4592]: healthz check failed
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.066419 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.069705 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" podStartSLOduration=127.06967765 podStartE2EDuration="2m7.06967765s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:39.067809684 +0000 UTC m=+149.215587365" watchObservedRunningTime="2025-09-29 16:53:39.06967765 +0000 UTC m=+149.217455331"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.107296 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" podStartSLOduration=128.107280826 podStartE2EDuration="2m8.107280826s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:39.106555645 +0000 UTC m=+149.254333316" watchObservedRunningTime="2025-09-29 16:53:39.107280826 +0000 UTC m=+149.255058507"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.135858 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" podStartSLOduration=127.135838234 podStartE2EDuration="2m7.135838234s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:39.132451444 +0000 UTC m=+149.280229135" watchObservedRunningTime="2025-09-29 16:53:39.135838234 +0000 UTC m=+149.283615935"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.137123 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.137667 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.637653168 +0000 UTC m=+149.785430849 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.238458 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.238929 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.738908214 +0000 UTC m=+149.886685955 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.339578 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.339764 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.839737528 +0000 UTC m=+149.987515209 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.339840 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.340182 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.840169501 +0000 UTC m=+149.987947182 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.440662 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.441075 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:39.941060557 +0000 UTC m=+150.088838238 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.542608 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.543041 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.043023265 +0000 UTC m=+150.190800946 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.643968 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.644111 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.144087266 +0000 UTC m=+150.291864937 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.644365 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.644792 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.144777686 +0000 UTC m=+150.292555367 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.703203 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-46ndj" event={"ID":"df0fe5a9-e5dd-40b0-8d51-addb3b8c8865","Type":"ContainerStarted","Data":"943e87323eabfc1b1debced3c7eb01bc69553fe6f4d37cf21e4010212d03e6b5"} Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.704600 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" event={"ID":"df3b90d4-ee10-47a5-a84f-5beb55894684","Type":"ContainerStarted","Data":"e548dce27d153075d053f820c8e20c8c628c6793f8e4df7abfe6501ab736f206"} Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.706775 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" event={"ID":"34f42b2b-8a4b-41da-89cc-cd4da7edafe0","Type":"ContainerStarted","Data":"0a457bc8757ac1d105b6751b943775f92436c73ea275f3cd15076e45f735e9ef"} Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.709394 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-4sx67" event={"ID":"94d01a53-233d-4e33-83c5-64a6200807b0","Type":"ContainerStarted","Data":"77623c1ce33504a60cf331d586998576ef66ea128e89bc723f94bf0b999a1661"} Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.710615 4592 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hwj5f container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.710686 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" podUID="542d754d-bd15-40b7-8208-876f318413a9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.745102 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.745324 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.24529136 +0000 UTC m=+150.393069051 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.745650 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.748457 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.248442514 +0000 UTC m=+150.396220195 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.770571 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hmszm" Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.774742 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-msxgb" Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.779393 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-46ndj" podStartSLOduration=127.779375312 podStartE2EDuration="2m7.779375312s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:39.778861087 +0000 UTC m=+149.926638768" watchObservedRunningTime="2025-09-29 16:53:39.779375312 +0000 UTC m=+149.927152993" Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.851061 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.851789 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.351752081 +0000 UTC m=+150.499529762 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.901825 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" podStartSLOduration=127.901808318 podStartE2EDuration="2m7.901808318s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:39.870719885 +0000 UTC m=+150.018497566" watchObservedRunningTime="2025-09-29 16:53:39.901808318 +0000 UTC m=+150.049585999"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.952852 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.952903 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.952922 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.952941 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.952989 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:53:39 crc kubenswrapper[4592]: E0929 16:53:39.955066 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.455047189 +0000 UTC m=+150.602824870 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.986126 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.987340 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.991826 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:53:39 crc kubenswrapper[4592]: I0929 16:53:39.992558 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.054004 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:40 crc kubenswrapper[4592]: E0929 16:53:40.054390 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.554369897 +0000 UTC m=+150.702147578 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.058343 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 16:53:40 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld
Sep 29 16:53:40 crc kubenswrapper[4592]: [+]process-running ok
Sep 29 16:53:40 crc kubenswrapper[4592]: healthz check failed
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.058397 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.109596 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-4sx67"
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.155272 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:40 crc kubenswrapper[4592]: E0929 16:53:40.155685 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.655672915 +0000 UTC m=+150.803450596 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.199729 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.206467 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.214596 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.257132 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:40 crc kubenswrapper[4592]: E0929 16:53:40.257589 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.757571451 +0000 UTC m=+150.905349132 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.358905 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:40 crc kubenswrapper[4592]: E0929 16:53:40.359384 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.859362513 +0000 UTC m=+151.007140194 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.461667 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:40 crc kubenswrapper[4592]: E0929 16:53:40.461955 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.961924249 +0000 UTC m=+151.109701930 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.462124 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:40 crc kubenswrapper[4592]: E0929 16:53:40.462565 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:40.962556418 +0000 UTC m=+151.110334099 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.563778 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Sep 29 16:53:40 crc kubenswrapper[4592]: E0929 16:53:40.564098 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:41.064081102 +0000 UTC m=+151.211858783 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.665698 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:40 crc kubenswrapper[4592]: E0929 16:53:40.666138 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:41.166121341 +0000 UTC m=+151.313899022 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.712069 4592 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-zjh4r container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.712124 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" podUID="e001240c-c2f8-4102-9a25-8e4e16b1a07d" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.32:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.726358 4592 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-hwj5f container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body=
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.726418 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" podUID="542d754d-bd15-40b7-8208-876f318413a9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.34:8080/healthz\": dial tcp 10.217.0.34:8080: connect: connection refused"
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.766658 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:40 crc kubenswrapper[4592]: E0929 16:53:40.768446 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:41.268426129 +0000 UTC m=+151.416203820 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.868745 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:40 crc kubenswrapper[4592]: E0929 16:53:40.869017 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:41.369006916 +0000 UTC m=+151.516784587 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:40 crc kubenswrapper[4592]: I0929 16:53:40.973314 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:40 crc kubenswrapper[4592]: E0929 16:53:40.973666 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:41.473650883 +0000 UTC m=+151.621428564 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.063338 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 16:53:41 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld
Sep 29 16:53:41 crc kubenswrapper[4592]: [+]process-running ok
Sep 29 16:53:41 crc kubenswrapper[4592]: healthz check failed
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.063383 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.075728 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:41 crc kubenswrapper[4592]: E0929 16:53:41.076048 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:41.576020563 +0000 UTC m=+151.723798244 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.077852 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-4sx67" podStartSLOduration=10.077839586 podStartE2EDuration="10.077839586s" podCreationTimestamp="2025-09-29 16:53:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:40.249484391 +0000 UTC m=+150.397262072" watchObservedRunningTime="2025-09-29 16:53:41.077839586 +0000 UTC m=+151.225617267"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.178628 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:41 crc kubenswrapper[4592]: E0929 16:53:41.179238 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:41.679222737 +0000 UTC m=+151.827000418 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.281740 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:41 crc kubenswrapper[4592]: E0929 16:53:41.282126 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:41.782092741 +0000 UTC m=+151.929870422 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.382678 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:41 crc kubenswrapper[4592]: E0929 16:53:41.382963 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:41.882911675 +0000 UTC m=+152.030689386 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.483823 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:41 crc kubenswrapper[4592]: E0929 16:53:41.484122 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:41.984110189 +0000 UTC m=+152.131887870 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.585136 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:41 crc kubenswrapper[4592]: E0929 16:53:41.585453 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:42.085439199 +0000 UTC m=+152.233216880 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.594460 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hdhfp"]
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.595320 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hdhfp"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.625242 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Sep 29 16:53:41 crc kubenswrapper[4592]: W0929 16:53:41.626289 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-4dd56e6b7632b66705bca76ccff08b2c3dadb4e8fe7bb42372600b2523d58db7 WatchSource:0}: Error finding container 4dd56e6b7632b66705bca76ccff08b2c3dadb4e8fe7bb42372600b2523d58db7: Status 404 returned error can't find the container with id 4dd56e6b7632b66705bca76ccff08b2c3dadb4e8fe7bb42372600b2523d58db7
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.656685 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hdhfp"]
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.675682 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.678064 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.683858 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.684014 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.686664 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0ffea6d-0977-4552-961e-fc318ff7db95-catalog-content\") pod \"certified-operators-hdhfp\" (UID: \"c0ffea6d-0977-4552-961e-fc318ff7db95\") " pod="openshift-marketplace/certified-operators-hdhfp"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.686713 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0ffea6d-0977-4552-961e-fc318ff7db95-utilities\") pod \"certified-operators-hdhfp\" (UID: \"c0ffea6d-0977-4552-961e-fc318ff7db95\") " pod="openshift-marketplace/certified-operators-hdhfp"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.686796 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.686825 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdslw\" (UniqueName: \"kubernetes.io/projected/c0ffea6d-0977-4552-961e-fc318ff7db95-kube-api-access-zdslw\") pod \"certified-operators-hdhfp\" (UID: \"c0ffea6d-0977-4552-961e-fc318ff7db95\") " pod="openshift-marketplace/certified-operators-hdhfp"
Sep 29 16:53:41 crc kubenswrapper[4592]: E0929 16:53:41.687113 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:42.187099567 +0000 UTC m=+152.334877248 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.690018 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.726768 4592 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-zjh4r container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.726820 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" podUID="e001240c-c2f8-4102-9a25-8e4e16b1a07d" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.32:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.790566 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:41 crc kubenswrapper[4592]: E0929 16:53:41.790742 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:42.290715174 +0000 UTC m=+152.438492855 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.790878 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.790940 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdslw\" (UniqueName: \"kubernetes.io/projected/c0ffea6d-0977-4552-961e-fc318ff7db95-kube-api-access-zdslw\") pod \"certified-operators-hdhfp\" (UID: \"c0ffea6d-0977-4552-961e-fc318ff7db95\") " pod="openshift-marketplace/certified-operators-hdhfp"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.791010 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ff48f4f6-07c9-4ed6-9b43-372b35b8c95f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.791045 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff48f4f6-07c9-4ed6-9b43-372b35b8c95f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 16:53:41 crc kubenswrapper[4592]: E0929 16:53:41.791092 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:42.291078694 +0000 UTC m=+152.438856375 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.791123 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0ffea6d-0977-4552-961e-fc318ff7db95-catalog-content\") pod \"certified-operators-hdhfp\" (UID: \"c0ffea6d-0977-4552-961e-fc318ff7db95\") " pod="openshift-marketplace/certified-operators-hdhfp"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.791285 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0ffea6d-0977-4552-961e-fc318ff7db95-utilities\") pod \"certified-operators-hdhfp\" (UID: \"c0ffea6d-0977-4552-961e-fc318ff7db95\") " pod="openshift-marketplace/certified-operators-hdhfp"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.791317 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4dd56e6b7632b66705bca76ccff08b2c3dadb4e8fe7bb42372600b2523d58db7"}
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.791503 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0ffea6d-0977-4552-961e-fc318ff7db95-catalog-content\") pod \"certified-operators-hdhfp\" (UID: \"c0ffea6d-0977-4552-961e-fc318ff7db95\") " pod="openshift-marketplace/certified-operators-hdhfp"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.791794 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0ffea6d-0977-4552-961e-fc318ff7db95-utilities\") pod \"certified-operators-hdhfp\" (UID: \"c0ffea6d-0977-4552-961e-fc318ff7db95\") " pod="openshift-marketplace/certified-operators-hdhfp"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.797603 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" event={"ID":"df3b90d4-ee10-47a5-a84f-5beb55894684","Type":"ContainerStarted","Data":"01edb39ae94e83019a0bdd5089cf190388c7e076078f4b3ed85179f7f09b13d4"}
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.797643 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" event={"ID":"df3b90d4-ee10-47a5-a84f-5beb55894684","Type":"ContainerStarted","Data":"ee2acc1870e7c4fa478a0b7d792f8ff782b10c756d50564cfa2f1cb53604b094"}
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.832353 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"4832e4c6faeb635da3fc31fdf7daa94ca6c7bf4beb6d835183f93ece374b986f"}
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.835049 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdslw\" (UniqueName: \"kubernetes.io/projected/c0ffea6d-0977-4552-961e-fc318ff7db95-kube-api-access-zdslw\") pod \"certified-operators-hdhfp\" (UID: \"c0ffea6d-0977-4552-961e-fc318ff7db95\") " pod="openshift-marketplace/certified-operators-hdhfp"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.835820 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"ca2cb3d3c9cc2667028449fb84283724c21100da90dc396f596cb7d10343afbe"}
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.892727 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.893035 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ff48f4f6-07c9-4ed6-9b43-372b35b8c95f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.893066 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff48f4f6-07c9-4ed6-9b43-372b35b8c95f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.893325 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ff48f4f6-07c9-4ed6-9b43-372b35b8c95f-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 16:53:41 crc kubenswrapper[4592]: E0929 16:53:41.894105 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:42.394090503 +0000 UTC m=+152.541868184 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.967816 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2tf2t"]
Sep 29 16:53:41 crc kubenswrapper[4592]: I0929 16:53:41.969213 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2tf2t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.001941 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-utilities\") pod \"certified-operators-2tf2t\" (UID: \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\") " pod="openshift-marketplace/certified-operators-2tf2t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.002010 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.002060 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvgtj\" (UniqueName: \"kubernetes.io/projected/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-kube-api-access-gvgtj\") pod \"certified-operators-2tf2t\" (UID: \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\") " pod="openshift-marketplace/certified-operators-2tf2t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.002089 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-catalog-content\") pod \"certified-operators-2tf2t\" (UID: \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\") " pod="openshift-marketplace/certified-operators-2tf2t"
Sep 29 16:53:42 crc kubenswrapper[4592]: E0929 16:53:42.002379 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:42.502368487 +0000 UTC m=+152.650146168 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.002790 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hdhfp"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.079843 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 16:53:42 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld
Sep 29 16:53:42 crc kubenswrapper[4592]: [+]process-running ok
Sep 29 16:53:42 crc kubenswrapper[4592]: healthz check failed
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.079904 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.086824 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-8pdqk"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.104331 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.104449 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvgtj\" (UniqueName: \"kubernetes.io/projected/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-kube-api-access-gvgtj\") pod \"certified-operators-2tf2t\" (UID: \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\") " pod="openshift-marketplace/certified-operators-2tf2t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.104526 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-catalog-content\") pod \"certified-operators-2tf2t\" (UID: \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\") " pod="openshift-marketplace/certified-operators-2tf2t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.104599 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-utilities\") pod \"certified-operators-2tf2t\" (UID: \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\") " pod="openshift-marketplace/certified-operators-2tf2t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.105042 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-utilities\") pod \"certified-operators-2tf2t\" (UID: \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\") " pod="openshift-marketplace/certified-operators-2tf2t"
Sep 29 16:53:42 crc kubenswrapper[4592]: E0929 16:53:42.105121 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:42.605107578 +0000 UTC m=+152.752885259 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.105531 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff48f4f6-07c9-4ed6-9b43-372b35b8c95f-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.105625 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-catalog-content\") pod \"certified-operators-2tf2t\" (UID: \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\") " pod="openshift-marketplace/certified-operators-2tf2t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.158650 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.158955 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.159295 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.159354 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.187289 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2tf2t"]
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.210622 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvgtj\" (UniqueName: \"kubernetes.io/projected/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-kube-api-access-gvgtj\") pod \"certified-operators-2tf2t\" (UID: \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\") " pod="openshift-marketplace/certified-operators-2tf2t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.211189 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:42 crc kubenswrapper[4592]: E0929 16:53:42.211486 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:42.711469917 +0000 UTC m=+152.859247598 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.279672 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-k498t"]
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.280703 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k498t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.286031 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2tf2t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.311561 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.311907 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.312132 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/836078bb-4975-4487-9404-6a3e4348292b-catalog-content\") pod \"community-operators-k498t\" (UID: \"836078bb-4975-4487-9404-6a3e4348292b\") " pod="openshift-marketplace/community-operators-k498t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.312198 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt2p4\" (UniqueName: \"kubernetes.io/projected/836078bb-4975-4487-9404-6a3e4348292b-kube-api-access-nt2p4\") pod \"community-operators-k498t\" (UID: \"836078bb-4975-4487-9404-6a3e4348292b\") " pod="openshift-marketplace/community-operators-k498t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.312303 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/836078bb-4975-4487-9404-6a3e4348292b-utilities\") pod \"community-operators-k498t\" (UID: \"836078bb-4975-4487-9404-6a3e4348292b\") " pod="openshift-marketplace/community-operators-k498t"
Sep 29 16:53:42 crc kubenswrapper[4592]: E0929 16:53:42.312392 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:42.812374273 +0000 UTC m=+152.960152034 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.314261 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.380508 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k498t"]
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.416054 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.416128 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/836078bb-4975-4487-9404-6a3e4348292b-utilities\") pod \"community-operators-k498t\" (UID: \"836078bb-4975-4487-9404-6a3e4348292b\") " pod="openshift-marketplace/community-operators-k498t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.416171 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/836078bb-4975-4487-9404-6a3e4348292b-catalog-content\") pod \"community-operators-k498t\" (UID: \"836078bb-4975-4487-9404-6a3e4348292b\") " pod="openshift-marketplace/community-operators-k498t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.416213 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt2p4\" (UniqueName: \"kubernetes.io/projected/836078bb-4975-4487-9404-6a3e4348292b-kube-api-access-nt2p4\") pod \"community-operators-k498t\" (UID: \"836078bb-4975-4487-9404-6a3e4348292b\") " pod="openshift-marketplace/community-operators-k498t"
Sep 29 16:53:42 crc kubenswrapper[4592]: E0929 16:53:42.416741 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:42.916729411 +0000 UTC m=+153.064507092 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.417375 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/836078bb-4975-4487-9404-6a3e4348292b-utilities\") pod \"community-operators-k498t\" (UID: \"836078bb-4975-4487-9404-6a3e4348292b\") " pod="openshift-marketplace/community-operators-k498t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.417572 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/836078bb-4975-4487-9404-6a3e4348292b-catalog-content\") pod \"community-operators-k498t\" (UID: \"836078bb-4975-4487-9404-6a3e4348292b\") " pod="openshift-marketplace/community-operators-k498t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.487756 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-tqwzq"]
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.489222 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tqwzq"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.491597 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt2p4\" (UniqueName: \"kubernetes.io/projected/836078bb-4975-4487-9404-6a3e4348292b-kube-api-access-nt2p4\") pod \"community-operators-k498t\" (UID: \"836078bb-4975-4487-9404-6a3e4348292b\") " pod="openshift-marketplace/community-operators-k498t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.524326 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.524688 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn6zr\" (UniqueName: \"kubernetes.io/projected/81890027-503c-4d1d-94c7-5ce8bdbef726-kube-api-access-mn6zr\") pod \"community-operators-tqwzq\" (UID: \"81890027-503c-4d1d-94c7-5ce8bdbef726\") " pod="openshift-marketplace/community-operators-tqwzq"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.524817 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81890027-503c-4d1d-94c7-5ce8bdbef726-catalog-content\") pod \"community-operators-tqwzq\" (UID: \"81890027-503c-4d1d-94c7-5ce8bdbef726\") " pod="openshift-marketplace/community-operators-tqwzq"
Sep 29 16:53:42 crc kubenswrapper[4592]: E0929 16:53:42.524897 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:43.024873842 +0000 UTC m=+153.172651523 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.524961 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81890027-503c-4d1d-94c7-5ce8bdbef726-utilities\") pod \"community-operators-tqwzq\" (UID: \"81890027-503c-4d1d-94c7-5ce8bdbef726\") " pod="openshift-marketplace/community-operators-tqwzq"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.556710 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tqwzq"]
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.615490 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k498t"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.627917 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn6zr\" (UniqueName: \"kubernetes.io/projected/81890027-503c-4d1d-94c7-5ce8bdbef726-kube-api-access-mn6zr\") pod \"community-operators-tqwzq\" (UID: \"81890027-503c-4d1d-94c7-5ce8bdbef726\") " pod="openshift-marketplace/community-operators-tqwzq"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.627976 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.628007 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81890027-503c-4d1d-94c7-5ce8bdbef726-catalog-content\") pod \"community-operators-tqwzq\" (UID: \"81890027-503c-4d1d-94c7-5ce8bdbef726\") " pod="openshift-marketplace/community-operators-tqwzq"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.628034 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81890027-503c-4d1d-94c7-5ce8bdbef726-utilities\") pod \"community-operators-tqwzq\" (UID: \"81890027-503c-4d1d-94c7-5ce8bdbef726\") " pod="openshift-marketplace/community-operators-tqwzq"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.628470 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81890027-503c-4d1d-94c7-5ce8bdbef726-utilities\") pod \"community-operators-tqwzq\" (UID: \"81890027-503c-4d1d-94c7-5ce8bdbef726\") " pod="openshift-marketplace/community-operators-tqwzq"
Sep 29 16:53:42 crc kubenswrapper[4592]: E0929 16:53:42.628613 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:43.128596882 +0000 UTC m=+153.276374563 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.628714 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81890027-503c-4d1d-94c7-5ce8bdbef726-catalog-content\") pod \"community-operators-tqwzq\" (UID: \"81890027-503c-4d1d-94c7-5ce8bdbef726\") " pod="openshift-marketplace/community-operators-tqwzq"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.696986 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn6zr\" (UniqueName: \"kubernetes.io/projected/81890027-503c-4d1d-94c7-5ce8bdbef726-kube-api-access-mn6zr\") pod \"community-operators-tqwzq\" (UID: \"81890027-503c-4d1d-94c7-5ce8bdbef726\") " pod="openshift-marketplace/community-operators-tqwzq"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.730739 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:42 crc kubenswrapper[4592]: E0929 16:53:42.731089 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:43.231068815 +0000 UTC m=+153.378846496 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.827440 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tqwzq"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.833110 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:42 crc kubenswrapper[4592]: E0929 16:53:42.833459 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:43.333449165 +0000 UTC m=+153.481226846 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.869428 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" event={"ID":"df3b90d4-ee10-47a5-a84f-5beb55894684","Type":"ContainerStarted","Data":"4fe8d2a7874fc480caf88273307abab9ecf77e2a3de50fcb181e7bc157970718"}
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.883932 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"5a3287a528762937297e8d504fafdf4e742a30145fcfe4ecbe259ae120171ea4"}
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.884272 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.891285 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"97cab742e6c3cf20207e1a93e3423a67d91ea904dde7de2aa04e726418ef72d6"}
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.916732 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"a89209f8929431b9840765c7150684cd65a506b0af38f4b326227c5e42bc1ed0"}
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.934675 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:42 crc kubenswrapper[4592]: E0929 16:53:42.938175 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:43.438123923 +0000 UTC m=+153.585901614 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.964826 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.965090 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-qtlgd"
Sep 29 16:53:42 crc kubenswrapper[4592]: I0929 16:53:42.964944 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-2nsh7" podStartSLOduration=11.964929129 podStartE2EDuration="11.964929129s" podCreationTimestamp="2025-09-29 16:53:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:42.949406788 +0000 UTC m=+153.097184469" watchObservedRunningTime="2025-09-29 16:53:42.964929129 +0000 UTC m=+153.112706810"
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.038875 4592 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.039037 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:43 crc kubenswrapper[4592]: E0929 16:53:43.040703 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:43.540689568 +0000 UTC m=+153.688467249 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.042688 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz"
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.048446 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-t4tpk"
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.054668 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Sep 29 16:53:43 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]process-running ok
Sep 29 16:53:43 crc kubenswrapper[4592]: healthz check failed
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.054709 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.100731 4592 patch_prober.go:28] interesting pod/apiserver-76f77b778f-qtlgd container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]log ok
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]etcd ok
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]poststarthook/start-apiserver-admission-initializer ok
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]poststarthook/generic-apiserver-start-informers ok
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]poststarthook/max-in-flight-filter ok
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]poststarthook/storage-object-count-tracker-hook ok
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Sep 29 16:53:43 crc kubenswrapper[4592]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Sep 29 16:53:43 crc kubenswrapper[4592]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]poststarthook/project.openshift.io-projectcache ok
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]poststarthook/openshift.io-startinformers ok
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]poststarthook/openshift.io-restmapperupdater ok
Sep 29 16:53:43 crc kubenswrapper[4592]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Sep 29 16:53:43 crc kubenswrapper[4592]: livez check failed
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.100824 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" podUID="8bede80e-3f98-4ca7-be3d-20e7bc9ea19e" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.140332 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:43 crc kubenswrapper[4592]: E0929 16:53:43.141344 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:43.641329436 +0000 UTC m=+153.789107107 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.241891 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:43 crc kubenswrapper[4592]: E0929 16:53:43.242390 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:43.742379397 +0000 UTC m=+153.890157078 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.343069 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:43 crc kubenswrapper[4592]: E0929 16:53:43.343277 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:43.843250652 +0000 UTC m=+153.991028323 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.343388 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:43 crc kubenswrapper[4592]: E0929 16:53:43.343702 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:43.843694975 +0000 UTC m=+153.991472656 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.364100 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hdhfp"]
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.444922 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:43 crc kubenswrapper[4592]: E0929 16:53:43.445176 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-09-29 16:53:43.945132167 +0000 UTC m=+154.092909858 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.445606 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k"
Sep 29 16:53:43 crc kubenswrapper[4592]: E0929 16:53:43.445979 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-09-29 16:53:43.945964402 +0000 UTC m=+154.093742083 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w748k" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.447639 4592 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-09-29T16:53:43.038902605Z","Handler":null,"Name":""}
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.497300 4592 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.497335 4592 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.546313 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.553713 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2tf2t"]
Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.624064 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8".
PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.652792 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.850663 4592 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.850723 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.851797 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-tqwzq"] Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.872457 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.873197 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.873661 4592 patch_prober.go:28] interesting pod/console-f9d7485db-zn6hr container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.873726 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-zn6hr" podUID="e586a2e2-918f-40e6-b7eb-9e937dd20c32" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.873815 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.873849 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.890938 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.918289 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.931746 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2tf2t" 
event={"ID":"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852","Type":"ContainerStarted","Data":"9d01575d567b4c02fd46d97a2719ae84467675bc179bd3adcf8bbc6a2b4ac64a"} Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.936820 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hdhfp" event={"ID":"c0ffea6d-0977-4552-961e-fc318ff7db95","Type":"ContainerStarted","Data":"87e711d9fe3ba4780baa95c8a432972643886cce4a0ba33654a087dabdcadbf4"} Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.936857 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hdhfp" event={"ID":"c0ffea6d-0977-4552-961e-fc318ff7db95","Type":"ContainerStarted","Data":"6b465fe3056d2a5cecc4b2f79ff52e8bdec3bfb1fc9e7851c6385ee295c88965"} Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.939427 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqwzq" event={"ID":"81890027-503c-4d1d-94c7-5ce8bdbef726","Type":"ContainerStarted","Data":"320fa111145dc4d47a15d4571bf82eda2d5d4485dfdd99d7ef9591ceea3e2f5f"} Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.942769 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f","Type":"ContainerStarted","Data":"c39718c040eca032b21938f6a2e5f72382e1bd0c800ebbb8e831e53dff888499"} Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.957681 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-r9mwk"] Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.958820 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.962472 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-pb7hk" Sep 29 16:53:43 crc kubenswrapper[4592]: I0929 16:53:43.977735 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.006114 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r9mwk"] Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.065891 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.067337 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9b6b21e-615b-458b-ae60-5e8535dea0c1-catalog-content\") pod \"redhat-marketplace-r9mwk\" (UID: \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\") " pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.067369 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9b6b21e-615b-458b-ae60-5e8535dea0c1-utilities\") pod \"redhat-marketplace-r9mwk\" (UID: \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\") " pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.067404 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-97mr6\" (UniqueName: \"kubernetes.io/projected/c9b6b21e-615b-458b-ae60-5e8535dea0c1-kube-api-access-97mr6\") pod \"redhat-marketplace-r9mwk\" (UID: \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\") " pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.070170 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 16:53:44 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld Sep 29 16:53:44 crc kubenswrapper[4592]: [+]process-running ok Sep 29 16:53:44 crc kubenswrapper[4592]: healthz check failed Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.070223 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.086055 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-zjh4r" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.178728 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9b6b21e-615b-458b-ae60-5e8535dea0c1-catalog-content\") pod \"redhat-marketplace-r9mwk\" (UID: \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\") " pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.178993 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9b6b21e-615b-458b-ae60-5e8535dea0c1-utilities\") pod \"redhat-marketplace-r9mwk\" (UID: \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\") " pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.179130 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97mr6\" (UniqueName: \"kubernetes.io/projected/c9b6b21e-615b-458b-ae60-5e8535dea0c1-kube-api-access-97mr6\") pod \"redhat-marketplace-r9mwk\" (UID: \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\") " pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.180417 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9b6b21e-615b-458b-ae60-5e8535dea0c1-utilities\") pod \"redhat-marketplace-r9mwk\" (UID: \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\") " pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.180418 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9b6b21e-615b-458b-ae60-5e8535dea0c1-catalog-content\") pod \"redhat-marketplace-r9mwk\" (UID: \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\") " pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.224609 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k498t"] Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.250123 4592 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-97mr6\" (UniqueName: \"kubernetes.io/projected/c9b6b21e-615b-458b-ae60-5e8535dea0c1-kube-api-access-97mr6\") pod \"redhat-marketplace-r9mwk\" (UID: \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\") " pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.279503 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.364842 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w748k\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.395509 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.398971 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lb49j"] Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.400088 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.442699 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lb49j"] Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.484344 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-utilities\") pod \"redhat-marketplace-lb49j\" (UID: \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\") " pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.484414 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-catalog-content\") pod \"redhat-marketplace-lb49j\" (UID: \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\") " pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.484443 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wfxn\" (UniqueName: \"kubernetes.io/projected/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-kube-api-access-4wfxn\") pod \"redhat-marketplace-lb49j\" (UID: \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\") " pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.586729 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-utilities\") pod \"redhat-marketplace-lb49j\" (UID: \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\") " pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.586775 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-catalog-content\") pod 
\"redhat-marketplace-lb49j\" (UID: \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\") " pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.586794 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wfxn\" (UniqueName: \"kubernetes.io/projected/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-kube-api-access-4wfxn\") pod \"redhat-marketplace-lb49j\" (UID: \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\") " pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.587407 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-utilities\") pod \"redhat-marketplace-lb49j\" (UID: \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\") " pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.587631 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-catalog-content\") pod \"redhat-marketplace-lb49j\" (UID: \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\") " pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.640583 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wfxn\" (UniqueName: \"kubernetes.io/projected/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-kube-api-access-4wfxn\") pod \"redhat-marketplace-lb49j\" (UID: \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\") " pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.749494 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.968345 4592 generic.go:334] "Generic (PLEG): container finished" podID="c0ffea6d-0977-4552-961e-fc318ff7db95" containerID="87e711d9fe3ba4780baa95c8a432972643886cce4a0ba33654a087dabdcadbf4" exitCode=0 Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.968452 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hdhfp" event={"ID":"c0ffea6d-0977-4552-961e-fc318ff7db95","Type":"ContainerDied","Data":"87e711d9fe3ba4780baa95c8a432972643886cce4a0ba33654a087dabdcadbf4"} Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.972244 4592 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.976103 4592 generic.go:334] "Generic (PLEG): container finished" podID="81890027-503c-4d1d-94c7-5ce8bdbef726" containerID="5f10f47d6c628b3cb26d461feb568636c7cbf8777c11655834f6a87c83eeff63" exitCode=0 Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.976184 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqwzq" event={"ID":"81890027-503c-4d1d-94c7-5ce8bdbef726","Type":"ContainerDied","Data":"5f10f47d6c628b3cb26d461feb568636c7cbf8777c11655834f6a87c83eeff63"} Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.994800 4592 generic.go:334] "Generic (PLEG): container finished" podID="836078bb-4975-4487-9404-6a3e4348292b" containerID="3becf378fe24c6b89a4dbfba263c5bf1490d5303a45c7e4c6c9d7fdd05f0e29c" exitCode=0 Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.995893 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k498t" event={"ID":"836078bb-4975-4487-9404-6a3e4348292b","Type":"ContainerDied","Data":"3becf378fe24c6b89a4dbfba263c5bf1490d5303a45c7e4c6c9d7fdd05f0e29c"} Sep 29 16:53:44 crc kubenswrapper[4592]: I0929 16:53:44.995923 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k498t" event={"ID":"836078bb-4975-4487-9404-6a3e4348292b","Type":"ContainerStarted","Data":"d3f95a6223ac1a3a9dee96cdc54e839ff77e6bed332432e0388243dd595e2268"} Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.034224 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f","Type":"ContainerStarted","Data":"dd6c2d26eb1fb69c33c15c5846454f52afab23a000fadaa715c4eb607588fcc4"} Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.040880 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.067936 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 16:53:45 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld Sep 29 16:53:45 crc kubenswrapper[4592]: [+]process-running ok Sep 29 16:53:45 crc kubenswrapper[4592]: healthz check failed Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.067992 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" 
containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.084628 4592 generic.go:334] "Generic (PLEG): container finished" podID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" containerID="e9b72961752d5011534403f56204356730ca2525fab095f54f3511e6f8013685" exitCode=0 Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.085837 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2tf2t" event={"ID":"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852","Type":"ContainerDied","Data":"e9b72961752d5011534403f56204356730ca2525fab095f54f3511e6f8013685"} Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.120816 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=4.120799562 podStartE2EDuration="4.120799562s" podCreationTimestamp="2025-09-29 16:53:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:45.117494763 +0000 UTC m=+155.265272454" watchObservedRunningTime="2025-09-29 16:53:45.120799562 +0000 UTC m=+155.268577243" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.231451 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.232007 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4c6m9"] Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.232983 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.244633 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4c6m9"] Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.261449 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.322033 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-562jq\" (UniqueName: \"kubernetes.io/projected/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-kube-api-access-562jq\") pod \"redhat-operators-4c6m9\" (UID: \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\") " pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.322268 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-catalog-content\") pod \"redhat-operators-4c6m9\" (UID: \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\") " pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.322295 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-utilities\") pod \"redhat-operators-4c6m9\" (UID: \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\") " pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.423469 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-catalog-content\") pod \"redhat-operators-4c6m9\" (UID: \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\") " pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.423526 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-utilities\") pod \"redhat-operators-4c6m9\" (UID: \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\") " pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.423562 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-562jq\" (UniqueName: \"kubernetes.io/projected/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-kube-api-access-562jq\") pod \"redhat-operators-4c6m9\" (UID: \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\") " pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.423985 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-utilities\") pod \"redhat-operators-4c6m9\" (UID: \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\") " pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.424071 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-catalog-content\") pod \"redhat-operators-4c6m9\" (UID: \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\") " 
pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.425586 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r9mwk"] Sep 29 16:53:45 crc kubenswrapper[4592]: W0929 16:53:45.433830 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9b6b21e_615b_458b_ae60_5e8535dea0c1.slice/crio-d3c53260f2112428596b79ff36eb3c564c9946ba837984dbe0b032123fbc4579 WatchSource:0}: Error finding container d3c53260f2112428596b79ff36eb3c564c9946ba837984dbe0b032123fbc4579: Status 404 returned error can't find the container with id d3c53260f2112428596b79ff36eb3c564c9946ba837984dbe0b032123fbc4579 Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.496085 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-562jq\" (UniqueName: \"kubernetes.io/projected/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-kube-api-access-562jq\") pod \"redhat-operators-4c6m9\" (UID: \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\") " pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.525602 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w748k"] Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.555171 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nhcpn"] Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.556295 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.571206 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.602771 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nhcpn"] Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.628603 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4829df1e-952f-4db8-b3ee-218715359da1-utilities\") pod \"redhat-operators-nhcpn\" (UID: \"4829df1e-952f-4db8-b3ee-218715359da1\") " pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.628650 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc4mg\" (UniqueName: \"kubernetes.io/projected/4829df1e-952f-4db8-b3ee-218715359da1-kube-api-access-rc4mg\") pod \"redhat-operators-nhcpn\" (UID: \"4829df1e-952f-4db8-b3ee-218715359da1\") " pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.628698 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4829df1e-952f-4db8-b3ee-218715359da1-catalog-content\") pod \"redhat-operators-nhcpn\" (UID: \"4829df1e-952f-4db8-b3ee-218715359da1\") " pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.662785 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lb49j"] Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.732899 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4829df1e-952f-4db8-b3ee-218715359da1-utilities\") pod \"redhat-operators-nhcpn\" (UID: \"4829df1e-952f-4db8-b3ee-218715359da1\") " pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.732959 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc4mg\" (UniqueName: \"kubernetes.io/projected/4829df1e-952f-4db8-b3ee-218715359da1-kube-api-access-rc4mg\") pod \"redhat-operators-nhcpn\" (UID: \"4829df1e-952f-4db8-b3ee-218715359da1\") " pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.733016 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4829df1e-952f-4db8-b3ee-218715359da1-catalog-content\") pod \"redhat-operators-nhcpn\" (UID: \"4829df1e-952f-4db8-b3ee-218715359da1\") " pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.733556 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4829df1e-952f-4db8-b3ee-218715359da1-catalog-content\") pod \"redhat-operators-nhcpn\" (UID: \"4829df1e-952f-4db8-b3ee-218715359da1\") " pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.736693 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4829df1e-952f-4db8-b3ee-218715359da1-utilities\") pod \"redhat-operators-nhcpn\" (UID: \"4829df1e-952f-4db8-b3ee-218715359da1\") " 
pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.759524 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc4mg\" (UniqueName: \"kubernetes.io/projected/4829df1e-952f-4db8-b3ee-218715359da1-kube-api-access-rc4mg\") pod \"redhat-operators-nhcpn\" (UID: \"4829df1e-952f-4db8-b3ee-218715359da1\") " pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:53:45 crc kubenswrapper[4592]: I0929 16:53:45.915531 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.064035 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 16:53:46 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld Sep 29 16:53:46 crc kubenswrapper[4592]: [+]process-running ok Sep 29 16:53:46 crc kubenswrapper[4592]: healthz check failed Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.064119 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.115460 4592 generic.go:334] "Generic (PLEG): container finished" podID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" containerID="b1a7dc64834d132a60be9bff1d80a8efbd3675bb1d4c651c9669baa73fbe7109" exitCode=0 Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.115861 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9mwk" event={"ID":"c9b6b21e-615b-458b-ae60-5e8535dea0c1","Type":"ContainerDied","Data":"b1a7dc64834d132a60be9bff1d80a8efbd3675bb1d4c651c9669baa73fbe7109"} Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.115912 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9mwk" event={"ID":"c9b6b21e-615b-458b-ae60-5e8535dea0c1","Type":"ContainerStarted","Data":"d3c53260f2112428596b79ff36eb3c564c9946ba837984dbe0b032123fbc4579"} Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.138543 4592 generic.go:334] "Generic (PLEG): container finished" podID="25870e3b-7737-4e6a-9ac7-a003d45c140b" containerID="9b339c2682c6ca96f15ab10e4b54b6d296ad154a3bfe8f9f0c1b4c151d08d556" exitCode=0 Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.138623 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" event={"ID":"25870e3b-7737-4e6a-9ac7-a003d45c140b","Type":"ContainerDied","Data":"9b339c2682c6ca96f15ab10e4b54b6d296ad154a3bfe8f9f0c1b4c151d08d556"} Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.188832 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4c6m9"] Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.197842 4592 generic.go:334] "Generic (PLEG): container finished" podID="ff48f4f6-07c9-4ed6-9b43-372b35b8c95f" containerID="dd6c2d26eb1fb69c33c15c5846454f52afab23a000fadaa715c4eb607588fcc4" exitCode=0 Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.197974 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f","Type":"ContainerDied","Data":"dd6c2d26eb1fb69c33c15c5846454f52afab23a000fadaa715c4eb607588fcc4"} Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.204968 4592 generic.go:334] "Generic (PLEG): container finished" podID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" containerID="8caac1d9ebb8445ec641dc0c77ff146ad6d80561b328cce8ce1f63d65103f283" exitCode=0 Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.205067 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lb49j" event={"ID":"e493af7b-bc6d-4f70-9c4d-65a4b52503fa","Type":"ContainerDied","Data":"8caac1d9ebb8445ec641dc0c77ff146ad6d80561b328cce8ce1f63d65103f283"} Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.205101 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lb49j" event={"ID":"e493af7b-bc6d-4f70-9c4d-65a4b52503fa","Type":"ContainerStarted","Data":"7d654e55ca74ca82f08d5277b9a5a2c39b65b5159d3e31d8a2976a001da87114"} Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.254105 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-w748k" event={"ID":"d4b30d14-aea8-4482-8220-81cf36dc8a93","Type":"ContainerStarted","Data":"0a99c9f872186930dc2ff26f77162bd11595bcc5751d9fca6858048d734722da"} Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.254253 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-w748k" event={"ID":"d4b30d14-aea8-4482-8220-81cf36dc8a93","Type":"ContainerStarted","Data":"ab8fc55153f851e07765574d27781a5e2cd6619fbf638692d7b1273d1d30869a"} Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.255921 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.684786 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-w748k" podStartSLOduration=134.684768529 podStartE2EDuration="2m14.684768529s" podCreationTimestamp="2025-09-29 16:51:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:46.304750005 +0000 UTC m=+156.452527686" watchObservedRunningTime="2025-09-29 16:53:46.684768529 +0000 UTC m=+156.832546210" Sep 29 16:53:46 crc kubenswrapper[4592]: I0929 16:53:46.685526 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nhcpn"] Sep 29 16:53:46 crc kubenswrapper[4592]: W0929 16:53:46.826172 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4829df1e_952f_4db8_b3ee_218715359da1.slice/crio-1ca33f9bd8fa7a4137c4abed51a7fa17ef404091cc0b5df886b07f880eff2d2c WatchSource:0}: Error finding container 1ca33f9bd8fa7a4137c4abed51a7fa17ef404091cc0b5df886b07f880eff2d2c: Status 404 returned error can't find the container with id 1ca33f9bd8fa7a4137c4abed51a7fa17ef404091cc0b5df886b07f880eff2d2c Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.050937 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" 
start-of-body=[-]backend-http failed: reason withheld Sep 29 16:53:47 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld Sep 29 16:53:47 crc kubenswrapper[4592]: [+]process-running ok Sep 29 16:53:47 crc kubenswrapper[4592]: healthz check failed Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.050992 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.278924 4592 generic.go:334] "Generic (PLEG): container finished" podID="4829df1e-952f-4db8-b3ee-218715359da1" containerID="5868b4369e29f8d5f77bf2b7c2a1d7df4e2354c8e0e6a5fecb50408ea82fd439" exitCode=0 Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.279090 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhcpn" event={"ID":"4829df1e-952f-4db8-b3ee-218715359da1","Type":"ContainerDied","Data":"5868b4369e29f8d5f77bf2b7c2a1d7df4e2354c8e0e6a5fecb50408ea82fd439"} Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.279123 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhcpn" event={"ID":"4829df1e-952f-4db8-b3ee-218715359da1","Type":"ContainerStarted","Data":"1ca33f9bd8fa7a4137c4abed51a7fa17ef404091cc0b5df886b07f880eff2d2c"} Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.298008 4592 generic.go:334] "Generic (PLEG): container finished" podID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" containerID="125b3af7958801c929a41944e9cffde51c1fcba7b07fbb8527389b4f41e7cb41" exitCode=0 Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.298699 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4c6m9" event={"ID":"9611b65a-9551-46c6-a4ae-70c8d6eb0b54","Type":"ContainerDied","Data":"125b3af7958801c929a41944e9cffde51c1fcba7b07fbb8527389b4f41e7cb41"} Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.298733 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4c6m9" event={"ID":"9611b65a-9551-46c6-a4ae-70c8d6eb0b54","Type":"ContainerStarted","Data":"a2aa740d209e198a94589d8c0a023dcf22b1308676175bd9eac2f368779ed80c"} Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.721459 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.785180 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25870e3b-7737-4e6a-9ac7-a003d45c140b-secret-volume\") pod \"25870e3b-7737-4e6a-9ac7-a003d45c140b\" (UID: \"25870e3b-7737-4e6a-9ac7-a003d45c140b\") " Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.785263 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hlw8\" (UniqueName: \"kubernetes.io/projected/25870e3b-7737-4e6a-9ac7-a003d45c140b-kube-api-access-7hlw8\") pod \"25870e3b-7737-4e6a-9ac7-a003d45c140b\" (UID: \"25870e3b-7737-4e6a-9ac7-a003d45c140b\") " Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.785289 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25870e3b-7737-4e6a-9ac7-a003d45c140b-config-volume\") pod \"25870e3b-7737-4e6a-9ac7-a003d45c140b\" (UID: \"25870e3b-7737-4e6a-9ac7-a003d45c140b\") " Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.786140 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25870e3b-7737-4e6a-9ac7-a003d45c140b-config-volume" (OuterVolumeSpecName: "config-volume") pod "25870e3b-7737-4e6a-9ac7-a003d45c140b" (UID: "25870e3b-7737-4e6a-9ac7-a003d45c140b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.802939 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25870e3b-7737-4e6a-9ac7-a003d45c140b-kube-api-access-7hlw8" (OuterVolumeSpecName: "kube-api-access-7hlw8") pod "25870e3b-7737-4e6a-9ac7-a003d45c140b" (UID: "25870e3b-7737-4e6a-9ac7-a003d45c140b"). InnerVolumeSpecName "kube-api-access-7hlw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.816397 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25870e3b-7737-4e6a-9ac7-a003d45c140b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "25870e3b-7737-4e6a-9ac7-a003d45c140b" (UID: "25870e3b-7737-4e6a-9ac7-a003d45c140b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.866709 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.886287 4592 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/25870e3b-7737-4e6a-9ac7-a003d45c140b-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.886318 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hlw8\" (UniqueName: \"kubernetes.io/projected/25870e3b-7737-4e6a-9ac7-a003d45c140b-kube-api-access-7hlw8\") on node \"crc\" DevicePath \"\"" Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.886328 4592 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/25870e3b-7737-4e6a-9ac7-a003d45c140b-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.965471 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.970904 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-qtlgd" Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.987031 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff48f4f6-07c9-4ed6-9b43-372b35b8c95f-kube-api-access\") pod \"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f\" (UID: \"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f\") " Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.987199 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ff48f4f6-07c9-4ed6-9b43-372b35b8c95f-kubelet-dir\") pod \"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f\" (UID: \"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f\") " Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.987566 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ff48f4f6-07c9-4ed6-9b43-372b35b8c95f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ff48f4f6-07c9-4ed6-9b43-372b35b8c95f" (UID: "ff48f4f6-07c9-4ed6-9b43-372b35b8c95f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 16:53:47 crc kubenswrapper[4592]: I0929 16:53:47.991716 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff48f4f6-07c9-4ed6-9b43-372b35b8c95f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ff48f4f6-07c9-4ed6-9b43-372b35b8c95f" (UID: "ff48f4f6-07c9-4ed6-9b43-372b35b8c95f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.049110 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 16:53:48 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld Sep 29 16:53:48 crc kubenswrapper[4592]: [+]process-running ok Sep 29 16:53:48 crc kubenswrapper[4592]: healthz check failed Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.049187 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.088733 4592 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ff48f4f6-07c9-4ed6-9b43-372b35b8c95f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.092360 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ff48f4f6-07c9-4ed6-9b43-372b35b8c95f-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.142292 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 16:53:48 crc kubenswrapper[4592]: E0929 16:53:48.142599 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25870e3b-7737-4e6a-9ac7-a003d45c140b" containerName="collect-profiles" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.142815 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="25870e3b-7737-4e6a-9ac7-a003d45c140b" containerName="collect-profiles" Sep 29 16:53:48 crc kubenswrapper[4592]: E0929 16:53:48.142830 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff48f4f6-07c9-4ed6-9b43-372b35b8c95f" containerName="pruner" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.142838 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff48f4f6-07c9-4ed6-9b43-372b35b8c95f" containerName="pruner" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.142963 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="25870e3b-7737-4e6a-9ac7-a003d45c140b" containerName="collect-profiles" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.142978 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff48f4f6-07c9-4ed6-9b43-372b35b8c95f" containerName="pruner" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.143489 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.143912 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.146057 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.146226 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.195571 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e581d2d-b885-4469-b791-f468419d0b66-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"9e581d2d-b885-4469-b791-f468419d0b66\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.195672 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e581d2d-b885-4469-b791-f468419d0b66-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"9e581d2d-b885-4469-b791-f468419d0b66\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.296887 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e581d2d-b885-4469-b791-f468419d0b66-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"9e581d2d-b885-4469-b791-f468419d0b66\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.297429 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e581d2d-b885-4469-b791-f468419d0b66-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"9e581d2d-b885-4469-b791-f468419d0b66\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.297455 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e581d2d-b885-4469-b791-f468419d0b66-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"9e581d2d-b885-4469-b791-f468419d0b66\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.321228 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e581d2d-b885-4469-b791-f468419d0b66-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"9e581d2d-b885-4469-b791-f468419d0b66\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.321787 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ff48f4f6-07c9-4ed6-9b43-372b35b8c95f","Type":"ContainerDied","Data":"c39718c040eca032b21938f6a2e5f72382e1bd0c800ebbb8e831e53dff888499"} Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.321820 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c39718c040eca032b21938f6a2e5f72382e1bd0c800ebbb8e831e53dff888499" Sep 29 16:53:48 crc kubenswrapper[4592]: 
I0929 16:53:48.321877 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.340198 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.340216 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s" event={"ID":"25870e3b-7737-4e6a-9ac7-a003d45c140b","Type":"ContainerDied","Data":"3da83bafaff9c27e5ebffc69c904121631cc9f2956fa27d5bb6d9f5388b089c6"} Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.340283 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3da83bafaff9c27e5ebffc69c904121631cc9f2956fa27d5bb6d9f5388b089c6" Sep 29 16:53:48 crc kubenswrapper[4592]: I0929 16:53:48.460330 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 16:53:49 crc kubenswrapper[4592]: I0929 16:53:49.051348 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 16:53:49 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld Sep 29 16:53:49 crc kubenswrapper[4592]: [+]process-running ok Sep 29 16:53:49 crc kubenswrapper[4592]: healthz check failed Sep 29 16:53:49 crc kubenswrapper[4592]: I0929 16:53:49.051745 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 16:53:49 crc kubenswrapper[4592]: I0929 16:53:49.135884 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-4sx67" Sep 29 16:53:49 crc kubenswrapper[4592]: I0929 16:53:49.285959 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Sep 29 16:53:49 crc kubenswrapper[4592]: I0929 16:53:49.364585 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"9e581d2d-b885-4469-b791-f468419d0b66","Type":"ContainerStarted","Data":"8945139aae67e33e2e3f52184e2877f43bf74b4e656f9282f9944f6855d4e41e"} Sep 29 16:53:50 crc kubenswrapper[4592]: I0929 16:53:50.052134 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 16:53:50 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld Sep 29 16:53:50 crc kubenswrapper[4592]: [+]process-running ok Sep 29 16:53:50 crc kubenswrapper[4592]: healthz check failed Sep 29 16:53:50 crc kubenswrapper[4592]: I0929 16:53:50.052576 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 16:53:51 crc kubenswrapper[4592]: I0929 16:53:51.056357 4592 
patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 16:53:51 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld Sep 29 16:53:51 crc kubenswrapper[4592]: [+]process-running ok Sep 29 16:53:51 crc kubenswrapper[4592]: healthz check failed Sep 29 16:53:51 crc kubenswrapper[4592]: I0929 16:53:51.056634 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 16:53:51 crc kubenswrapper[4592]: I0929 16:53:51.389487 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"9e581d2d-b885-4469-b791-f468419d0b66","Type":"ContainerStarted","Data":"57bcf3b62d60568e40f16ef1ebae4f0c9db6ce595875503eb4b1eb87dd096236"} Sep 29 16:53:52 crc kubenswrapper[4592]: I0929 16:53:52.049850 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 16:53:52 crc kubenswrapper[4592]: [-]has-synced failed: reason withheld Sep 29 16:53:52 crc kubenswrapper[4592]: [+]process-running ok Sep 29 16:53:52 crc kubenswrapper[4592]: healthz check failed Sep 29 16:53:52 crc kubenswrapper[4592]: I0929 16:53:52.049923 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 16:53:52 crc kubenswrapper[4592]: I0929 16:53:52.161913 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:53:52 crc kubenswrapper[4592]: I0929 16:53:52.161964 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:53:52 crc kubenswrapper[4592]: I0929 16:53:52.162050 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:53:52 crc kubenswrapper[4592]: I0929 16:53:52.162098 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:53:52 crc kubenswrapper[4592]: I0929 16:53:52.417163 4592 generic.go:334] "Generic (PLEG): container finished" podID="9e581d2d-b885-4469-b791-f468419d0b66" 
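
[editor's note] The router records above show how kubelet evaluates an HTTP startup probe: a 500 response with a "[-]backend-http failed" body is a failure, and only the start of the response body is kept for the "start-of-body=" field. A minimal stdlib sketch of that check follows; the probe URL, the 1s timeout, and the 256-byte body cap are illustrative assumptions, not values read from this pod's spec (kubelet's real prober lives in pkg/probe/http).

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// probeHTTP performs one HTTP probe the way the records above describe:
// a 2xx/3xx status is success, anything else is failure, and only the
// beginning of the response body is retained for the log line.
func probeHTTP(url string) (ok bool, status int, startOfBody string, err error) {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return false, 0, "", err // e.g. "connect: connection refused"
	}
	defer resp.Body.Close()
	head, _ := io.ReadAll(io.LimitReader(resp.Body, 256)) // body cap is an assumption
	ok = resp.StatusCode >= 200 && resp.StatusCode < 400
	return ok, resp.StatusCode, string(head), nil
}

func main() {
	// Hypothetical endpoint standing in for the router's health check.
	ok, status, body, err := probeHTTP("http://127.0.0.1:1936/healthz/ready")
	if err != nil {
		fmt.Printf("Probe failed: %v\n", err)
		return
	}
	if !ok {
		fmt.Printf("HTTP probe failed with statuscode: %d start-of-body=%s\n", status, body)
	}
}
```
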
containerID="57bcf3b62d60568e40f16ef1ebae4f0c9db6ce595875503eb4b1eb87dd096236" exitCode=0 Sep 29 16:53:52 crc kubenswrapper[4592]: I0929 16:53:52.417297 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"9e581d2d-b885-4469-b791-f468419d0b66","Type":"ContainerDied","Data":"57bcf3b62d60568e40f16ef1ebae4f0c9db6ce595875503eb4b1eb87dd096236"} Sep 29 16:53:53 crc kubenswrapper[4592]: I0929 16:53:53.049442 4592 patch_prober.go:28] interesting pod/router-default-5444994796-t4tpk container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Sep 29 16:53:53 crc kubenswrapper[4592]: [+]has-synced ok Sep 29 16:53:53 crc kubenswrapper[4592]: [+]process-running ok Sep 29 16:53:53 crc kubenswrapper[4592]: healthz check failed Sep 29 16:53:53 crc kubenswrapper[4592]: I0929 16:53:53.049559 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t4tpk" podUID="b3554952-1f35-4ce9-9a10-1caa25c188fb" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 16:53:53 crc kubenswrapper[4592]: I0929 16:53:53.801678 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 16:53:53 crc kubenswrapper[4592]: I0929 16:53:53.872674 4592 patch_prober.go:28] interesting pod/console-f9d7485db-zn6hr container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body= Sep 29 16:53:53 crc kubenswrapper[4592]: I0929 16:53:53.873060 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-zn6hr" podUID="e586a2e2-918f-40e6-b7eb-9e937dd20c32" containerName="console" probeResult="failure" output="Get \"https://10.217.0.13:8443/health\": dial tcp 10.217.0.13:8443: connect: connection refused" Sep 29 16:53:53 crc kubenswrapper[4592]: I0929 16:53:53.920125 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e581d2d-b885-4469-b791-f468419d0b66-kube-api-access\") pod \"9e581d2d-b885-4469-b791-f468419d0b66\" (UID: \"9e581d2d-b885-4469-b791-f468419d0b66\") " Sep 29 16:53:53 crc kubenswrapper[4592]: I0929 16:53:53.920254 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e581d2d-b885-4469-b791-f468419d0b66-kubelet-dir\") pod \"9e581d2d-b885-4469-b791-f468419d0b66\" (UID: \"9e581d2d-b885-4469-b791-f468419d0b66\") " Sep 29 16:53:53 crc kubenswrapper[4592]: I0929 16:53:53.922108 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e581d2d-b885-4469-b791-f468419d0b66-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9e581d2d-b885-4469-b791-f468419d0b66" (UID: "9e581d2d-b885-4469-b791-f468419d0b66"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 16:53:53 crc kubenswrapper[4592]: I0929 16:53:53.947490 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e581d2d-b885-4469-b791-f468419d0b66-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9e581d2d-b885-4469-b791-f468419d0b66" (UID: "9e581d2d-b885-4469-b791-f468419d0b66"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:53:54 crc kubenswrapper[4592]: I0929 16:53:54.023339 4592 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e581d2d-b885-4469-b791-f468419d0b66-kubelet-dir\") on node \"crc\" DevicePath \"\"" Sep 29 16:53:54 crc kubenswrapper[4592]: I0929 16:53:54.023368 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e581d2d-b885-4469-b791-f468419d0b66-kube-api-access\") on node \"crc\" DevicePath \"\"" Sep 29 16:53:54 crc kubenswrapper[4592]: I0929 16:53:54.054888 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-t4tpk" Sep 29 16:53:54 crc kubenswrapper[4592]: I0929 16:53:54.058869 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-t4tpk" Sep 29 16:53:54 crc kubenswrapper[4592]: I0929 16:53:54.460110 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Sep 29 16:53:54 crc kubenswrapper[4592]: I0929 16:53:54.460191 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"9e581d2d-b885-4469-b791-f468419d0b66","Type":"ContainerDied","Data":"8945139aae67e33e2e3f52184e2877f43bf74b4e656f9282f9944f6855d4e41e"} Sep 29 16:53:54 crc kubenswrapper[4592]: I0929 16:53:54.460225 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8945139aae67e33e2e3f52184e2877f43bf74b4e656f9282f9944f6855d4e41e" Sep 29 16:53:55 crc kubenswrapper[4592]: I0929 16:53:55.178783 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:53:55 crc kubenswrapper[4592]: I0929 16:53:55.193167 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/484e63f2-7bae-4e57-ab79-95cba3bad285-metrics-certs\") pod \"network-metrics-daemon-qvsjc\" (UID: \"484e63f2-7bae-4e57-ab79-95cba3bad285\") " pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:53:55 crc kubenswrapper[4592]: I0929 16:53:55.395787 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-qvsjc" Sep 29 16:53:56 crc kubenswrapper[4592]: I0929 16:53:56.020690 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-qvsjc"] Sep 29 16:53:56 crc kubenswrapper[4592]: W0929 16:53:56.032936 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod484e63f2_7bae_4e57_ab79_95cba3bad285.slice/crio-2542e90a970378a26a0cde85985f6d215685958b3eb4a9ad4294e2256f074e13 WatchSource:0}: Error finding container 2542e90a970378a26a0cde85985f6d215685958b3eb4a9ad4294e2256f074e13: Status 404 returned error can't find the container with id 2542e90a970378a26a0cde85985f6d215685958b3eb4a9ad4294e2256f074e13 Sep 29 16:53:56 crc kubenswrapper[4592]: I0929 16:53:56.478072 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" event={"ID":"484e63f2-7bae-4e57-ab79-95cba3bad285","Type":"ContainerStarted","Data":"2542e90a970378a26a0cde85985f6d215685958b3eb4a9ad4294e2256f074e13"} Sep 29 16:53:57 crc kubenswrapper[4592]: I0929 16:53:57.490118 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" event={"ID":"484e63f2-7bae-4e57-ab79-95cba3bad285","Type":"ContainerStarted","Data":"c689ee2bcde98077b4d46c2fba9f8ac8818caa2d356ed69dd3e42e92bf863c5e"} Sep 29 16:53:58 crc kubenswrapper[4592]: I0929 16:53:58.518974 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-qvsjc" event={"ID":"484e63f2-7bae-4e57-ab79-95cba3bad285","Type":"ContainerStarted","Data":"75b6323d9af733fbeda3be613286d51ad55c1a985ca201d0aa3c6c6620f923f6"} Sep 29 16:53:59 crc kubenswrapper[4592]: I0929 16:53:59.555718 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-qvsjc" podStartSLOduration=148.555700558 podStartE2EDuration="2m28.555700558s" podCreationTimestamp="2025-09-29 16:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:53:59.55172986 +0000 UTC m=+169.699507541" watchObservedRunningTime="2025-09-29 16:53:59.555700558 +0000 UTC m=+169.703478239" Sep 29 16:54:00 crc kubenswrapper[4592]: I0929 16:54:00.883457 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 16:54:00 crc kubenswrapper[4592]: I0929 16:54:00.883512 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 16:54:02 crc kubenswrapper[4592]: I0929 16:54:02.159836 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:54:02 crc kubenswrapper[4592]: I0929 16:54:02.160201 4592 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:54:02 crc kubenswrapper[4592]: I0929 16:54:02.160247 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-49g4p" Sep 29 16:54:02 crc kubenswrapper[4592]: I0929 16:54:02.159885 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:54:02 crc kubenswrapper[4592]: I0929 16:54:02.160631 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:54:02 crc kubenswrapper[4592]: I0929 16:54:02.160829 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"12c3b51e2cf47790eb69de823dae4ffe59591d3652e5fd7f582ba28c5b49dfbe"} pod="openshift-console/downloads-7954f5f757-49g4p" containerMessage="Container download-server failed liveness probe, will be restarted" Sep 29 16:54:02 crc kubenswrapper[4592]: I0929 16:54:02.160978 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" containerID="cri-o://12c3b51e2cf47790eb69de823dae4ffe59591d3652e5fd7f582ba28c5b49dfbe" gracePeriod=2 Sep 29 16:54:02 crc kubenswrapper[4592]: I0929 16:54:02.161456 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:54:02 crc kubenswrapper[4592]: I0929 16:54:02.161615 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:54:03 crc kubenswrapper[4592]: I0929 16:54:03.586655 4592 generic.go:334] "Generic (PLEG): container finished" podID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerID="12c3b51e2cf47790eb69de823dae4ffe59591d3652e5fd7f582ba28c5b49dfbe" exitCode=0 Sep 29 16:54:03 crc kubenswrapper[4592]: I0929 16:54:03.586687 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-49g4p" event={"ID":"e4394fc0-2772-479f-84e4-bbdb7d3b493a","Type":"ContainerDied","Data":"12c3b51e2cf47790eb69de823dae4ffe59591d3652e5fd7f582ba28c5b49dfbe"} Sep 29 16:54:03 crc kubenswrapper[4592]: I0929 16:54:03.908740 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-zn6hr" Sep 29 16:54:03 crc kubenswrapper[4592]: I0929 16:54:03.913494 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-zn6hr" 
Sep 29 16:54:04 crc kubenswrapper[4592]: I0929 16:54:04.449130 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 16:54:12 crc kubenswrapper[4592]: I0929 16:54:12.158696 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:54:12 crc kubenswrapper[4592]: I0929 16:54:12.159230 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:54:13 crc kubenswrapper[4592]: I0929 16:54:13.996766 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-7h6m6" Sep 29 16:54:20 crc kubenswrapper[4592]: I0929 16:54:20.215074 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Sep 29 16:54:22 crc kubenswrapper[4592]: I0929 16:54:22.158882 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:54:22 crc kubenswrapper[4592]: I0929 16:54:22.158965 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:54:30 crc kubenswrapper[4592]: I0929 16:54:30.883068 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 16:54:30 crc kubenswrapper[4592]: I0929 16:54:30.883442 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 16:54:32 crc kubenswrapper[4592]: E0929 16:54:32.035114 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 29 16:54:32 crc kubenswrapper[4592]: E0929 16:54:32.035566 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mn6zr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-tqwzq_openshift-marketplace(81890027-503c-4d1d-94c7-5ce8bdbef726): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 16:54:32 crc kubenswrapper[4592]: E0929 16:54:32.036748 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-tqwzq" podUID="81890027-503c-4d1d-94c7-5ce8bdbef726" Sep 29 16:54:32 crc kubenswrapper[4592]: I0929 16:54:32.162317 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:54:32 crc kubenswrapper[4592]: I0929 16:54:32.162385 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:54:34 crc kubenswrapper[4592]: E0929 16:54:34.315548 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-tqwzq" podUID="81890027-503c-4d1d-94c7-5ce8bdbef726" Sep 29 16:54:36 crc kubenswrapper[4592]: E0929 16:54:36.202489 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 29 16:54:36 crc kubenswrapper[4592]: E0929 16:54:36.202916 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-97mr6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-r9mwk_openshift-marketplace(c9b6b21e-615b-458b-ae60-5e8535dea0c1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 16:54:36 crc kubenswrapper[4592]: E0929 16:54:36.205196 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-r9mwk" podUID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" Sep 29 16:54:37 crc kubenswrapper[4592]: E0929 16:54:37.794134 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-r9mwk" podUID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" Sep 29 16:54:37 crc kubenswrapper[4592]: E0929 16:54:37.887732 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 29 16:54:37 crc kubenswrapper[4592]: E0929 16:54:37.887909 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gvgtj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-2tf2t_openshift-marketplace(6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 16:54:37 crc kubenswrapper[4592]: E0929 16:54:37.889229 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-2tf2t" podUID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.214187 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-2tf2t" podUID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.293260 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.293429 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4wfxn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-lb49j_openshift-marketplace(e493af7b-bc6d-4f70-9c4d-65a4b52503fa): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.294780 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-lb49j" podUID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.312885 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.313050 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-562jq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-4c6m9_openshift-marketplace(9611b65a-9551-46c6-a4ae-70c8d6eb0b54): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.314342 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-4c6m9" podUID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.335882 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.336020 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nt2p4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-k498t_openshift-marketplace(836078bb-4975-4487-9404-6a3e4348292b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.337675 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-k498t" podUID="836078bb-4975-4487-9404-6a3e4348292b" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.344397 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.344551 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rc4mg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-nhcpn_openshift-marketplace(4829df1e-952f-4db8-b3ee-218715359da1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.346269 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-nhcpn" podUID="4829df1e-952f-4db8-b3ee-218715359da1" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.362139 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.362328 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zdslw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-hdhfp_openshift-marketplace(c0ffea6d-0977-4552-961e-fc318ff7db95): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.363776 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-hdhfp" podUID="c0ffea6d-0977-4552-961e-fc318ff7db95" Sep 29 16:54:41 crc kubenswrapper[4592]: I0929 16:54:41.796603 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-49g4p" event={"ID":"e4394fc0-2772-479f-84e4-bbdb7d3b493a","Type":"ContainerStarted","Data":"b38f70335d0a74a068d2fd2f4daad052fecfb842ebf813e5e7684f0f2879c750"} Sep 29 16:54:41 crc kubenswrapper[4592]: I0929 16:54:41.798222 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:54:41 crc kubenswrapper[4592]: I0929 16:54:41.798286 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:54:41 crc kubenswrapper[4592]: I0929 16:54:41.798376 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-49g4p" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.799308 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-hdhfp" 
podUID="c0ffea6d-0977-4552-961e-fc318ff7db95" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.799485 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-k498t" podUID="836078bb-4975-4487-9404-6a3e4348292b" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.799665 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-lb49j" podUID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.800024 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-nhcpn" podUID="4829df1e-952f-4db8-b3ee-218715359da1" Sep 29 16:54:41 crc kubenswrapper[4592]: E0929 16:54:41.805557 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-4c6m9" podUID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" Sep 29 16:54:42 crc kubenswrapper[4592]: I0929 16:54:42.158631 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:54:42 crc kubenswrapper[4592]: I0929 16:54:42.158667 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:54:42 crc kubenswrapper[4592]: I0929 16:54:42.158693 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:54:42 crc kubenswrapper[4592]: I0929 16:54:42.158720 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:54:42 crc kubenswrapper[4592]: I0929 16:54:42.801765 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:54:42 crc kubenswrapper[4592]: I0929 16:54:42.801824 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" 
containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:54:43 crc kubenswrapper[4592]: I0929 16:54:43.807486 4592 patch_prober.go:28] interesting pod/downloads-7954f5f757-49g4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Sep 29 16:54:43 crc kubenswrapper[4592]: I0929 16:54:43.807789 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-49g4p" podUID="e4394fc0-2772-479f-84e4-bbdb7d3b493a" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Sep 29 16:54:52 crc kubenswrapper[4592]: I0929 16:54:52.165472 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-49g4p" Sep 29 16:54:55 crc kubenswrapper[4592]: I0929 16:54:55.884743 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k498t" event={"ID":"836078bb-4975-4487-9404-6a3e4348292b","Type":"ContainerStarted","Data":"69df71014ceb4780de5e9cb6033fc3abc1df4badbe233d0b2bf9aea83af88d24"} Sep 29 16:54:55 crc kubenswrapper[4592]: I0929 16:54:55.886320 4592 generic.go:334] "Generic (PLEG): container finished" podID="81890027-503c-4d1d-94c7-5ce8bdbef726" containerID="ea7be04f532b98ea091f0651763ae5684e6694f9dd2ec3ecd243608f1363f3b2" exitCode=0 Sep 29 16:54:55 crc kubenswrapper[4592]: I0929 16:54:55.886373 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqwzq" event={"ID":"81890027-503c-4d1d-94c7-5ce8bdbef726","Type":"ContainerDied","Data":"ea7be04f532b98ea091f0651763ae5684e6694f9dd2ec3ecd243608f1363f3b2"} Sep 29 16:54:55 crc kubenswrapper[4592]: I0929 16:54:55.894888 4592 generic.go:334] "Generic (PLEG): container finished" podID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" containerID="5ecbe7d8c0e8df5ef2bc2ec7470513c2b43c531d88750842e86bf3fc0f040f92" exitCode=0 Sep 29 16:54:55 crc kubenswrapper[4592]: I0929 16:54:55.894995 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9mwk" event={"ID":"c9b6b21e-615b-458b-ae60-5e8535dea0c1","Type":"ContainerDied","Data":"5ecbe7d8c0e8df5ef2bc2ec7470513c2b43c531d88750842e86bf3fc0f040f92"} Sep 29 16:54:55 crc kubenswrapper[4592]: I0929 16:54:55.900370 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4c6m9" event={"ID":"9611b65a-9551-46c6-a4ae-70c8d6eb0b54","Type":"ContainerStarted","Data":"2de724d79d9f8db586b297011d1358b2abb22eca1d9a364ebde7df423ed15fbc"} Sep 29 16:54:56 crc kubenswrapper[4592]: I0929 16:54:56.906445 4592 generic.go:334] "Generic (PLEG): container finished" podID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" containerID="2de724d79d9f8db586b297011d1358b2abb22eca1d9a364ebde7df423ed15fbc" exitCode=0 Sep 29 16:54:56 crc kubenswrapper[4592]: I0929 16:54:56.906520 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4c6m9" event={"ID":"9611b65a-9551-46c6-a4ae-70c8d6eb0b54","Type":"ContainerDied","Data":"2de724d79d9f8db586b297011d1358b2abb22eca1d9a364ebde7df423ed15fbc"} Sep 29 16:54:56 crc kubenswrapper[4592]: I0929 16:54:56.908538 4592 generic.go:334] "Generic (PLEG): container finished" 
podID="836078bb-4975-4487-9404-6a3e4348292b" containerID="69df71014ceb4780de5e9cb6033fc3abc1df4badbe233d0b2bf9aea83af88d24" exitCode=0 Sep 29 16:54:56 crc kubenswrapper[4592]: I0929 16:54:56.908562 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k498t" event={"ID":"836078bb-4975-4487-9404-6a3e4348292b","Type":"ContainerDied","Data":"69df71014ceb4780de5e9cb6033fc3abc1df4badbe233d0b2bf9aea83af88d24"} Sep 29 16:54:56 crc kubenswrapper[4592]: I0929 16:54:56.915330 4592 generic.go:334] "Generic (PLEG): container finished" podID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" containerID="2fb741c26b3696b67b621a3b33da1bae7a86efc79a768750516d291953f603bf" exitCode=0 Sep 29 16:54:56 crc kubenswrapper[4592]: I0929 16:54:56.915406 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2tf2t" event={"ID":"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852","Type":"ContainerDied","Data":"2fb741c26b3696b67b621a3b33da1bae7a86efc79a768750516d291953f603bf"} Sep 29 16:54:56 crc kubenswrapper[4592]: I0929 16:54:56.921614 4592 generic.go:334] "Generic (PLEG): container finished" podID="c0ffea6d-0977-4552-961e-fc318ff7db95" containerID="8a5981bcdc455d12508ff9dabfa8625642bd149cf84be7303f0af783b50492a5" exitCode=0 Sep 29 16:54:56 crc kubenswrapper[4592]: I0929 16:54:56.921653 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hdhfp" event={"ID":"c0ffea6d-0977-4552-961e-fc318ff7db95","Type":"ContainerDied","Data":"8a5981bcdc455d12508ff9dabfa8625642bd149cf84be7303f0af783b50492a5"} Sep 29 16:55:00 crc kubenswrapper[4592]: I0929 16:55:00.882972 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 16:55:00 crc kubenswrapper[4592]: I0929 16:55:00.883316 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 16:55:00 crc kubenswrapper[4592]: I0929 16:55:00.883359 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 16:55:00 crc kubenswrapper[4592]: I0929 16:55:00.883819 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 16:55:00 crc kubenswrapper[4592]: I0929 16:55:00.883864 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230" gracePeriod=600 Sep 29 16:55:01 crc kubenswrapper[4592]: I0929 16:55:01.953704 4592 generic.go:334] "Generic (PLEG): container finished" 
podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230" exitCode=0 Sep 29 16:55:01 crc kubenswrapper[4592]: I0929 16:55:01.953797 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230"} Sep 29 16:55:04 crc kubenswrapper[4592]: I0929 16:55:04.972051 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"148831675bbb8aed327c76ca2e7313d94cd39b81a906464672542ffb204027b7"} Sep 29 16:55:06 crc kubenswrapper[4592]: I0929 16:55:06.988224 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4c6m9" event={"ID":"9611b65a-9551-46c6-a4ae-70c8d6eb0b54","Type":"ContainerStarted","Data":"3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90"} Sep 29 16:55:07 crc kubenswrapper[4592]: I0929 16:55:07.011637 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4c6m9" podStartSLOduration=5.421545045 podStartE2EDuration="1m22.01161597s" podCreationTimestamp="2025-09-29 16:53:45 +0000 UTC" firstStartedPulling="2025-09-29 16:53:47.304247013 +0000 UTC m=+157.452024694" lastFinishedPulling="2025-09-29 16:55:03.894317938 +0000 UTC m=+234.042095619" observedRunningTime="2025-09-29 16:55:07.005769585 +0000 UTC m=+237.153547266" watchObservedRunningTime="2025-09-29 16:55:07.01161597 +0000 UTC m=+237.159393651" Sep 29 16:55:07 crc kubenswrapper[4592]: I0929 16:55:07.994468 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2tf2t" event={"ID":"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852","Type":"ContainerStarted","Data":"2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78"} Sep 29 16:55:07 crc kubenswrapper[4592]: I0929 16:55:07.996213 4592 generic.go:334] "Generic (PLEG): container finished" podID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" containerID="2e73af0094a9ef41bb3ad8ab700460890cdf4c5f643ae68e151eea814b8a6739" exitCode=0 Sep 29 16:55:07 crc kubenswrapper[4592]: I0929 16:55:07.996255 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lb49j" event={"ID":"e493af7b-bc6d-4f70-9c4d-65a4b52503fa","Type":"ContainerDied","Data":"2e73af0094a9ef41bb3ad8ab700460890cdf4c5f643ae68e151eea814b8a6739"} Sep 29 16:55:08 crc kubenswrapper[4592]: I0929 16:55:08.000528 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hdhfp" event={"ID":"c0ffea6d-0977-4552-961e-fc318ff7db95","Type":"ContainerStarted","Data":"fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a"} Sep 29 16:55:08 crc kubenswrapper[4592]: I0929 16:55:08.003310 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqwzq" event={"ID":"81890027-503c-4d1d-94c7-5ce8bdbef726","Type":"ContainerStarted","Data":"e618f887cd7c2384ebc07fe9c6c6a6d1d6064ae2b73186d4e23496793660e1c9"} Sep 29 16:55:08 crc kubenswrapper[4592]: I0929 16:55:08.005420 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhcpn" 
event={"ID":"4829df1e-952f-4db8-b3ee-218715359da1","Type":"ContainerStarted","Data":"fdf4febfa7376714764ae309df0c4eb7037f7a2ee6d8837c4d5d468d8d16ccc8"} Sep 29 16:55:08 crc kubenswrapper[4592]: I0929 16:55:08.007072 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9mwk" event={"ID":"c9b6b21e-615b-458b-ae60-5e8535dea0c1","Type":"ContainerStarted","Data":"54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3"} Sep 29 16:55:08 crc kubenswrapper[4592]: I0929 16:55:08.031321 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k498t" event={"ID":"836078bb-4975-4487-9404-6a3e4348292b","Type":"ContainerStarted","Data":"41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926"} Sep 29 16:55:08 crc kubenswrapper[4592]: I0929 16:55:08.032123 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2tf2t" podStartSLOduration=5.417235376 podStartE2EDuration="1m27.032102337s" podCreationTimestamp="2025-09-29 16:53:41 +0000 UTC" firstStartedPulling="2025-09-29 16:53:45.094333055 +0000 UTC m=+155.242110746" lastFinishedPulling="2025-09-29 16:55:06.709200026 +0000 UTC m=+236.856977707" observedRunningTime="2025-09-29 16:55:08.028933447 +0000 UTC m=+238.176711128" watchObservedRunningTime="2025-09-29 16:55:08.032102337 +0000 UTC m=+238.179880018" Sep 29 16:55:08 crc kubenswrapper[4592]: I0929 16:55:08.075905 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-tqwzq" podStartSLOduration=4.042227195 podStartE2EDuration="1m26.075886504s" podCreationTimestamp="2025-09-29 16:53:42 +0000 UTC" firstStartedPulling="2025-09-29 16:53:44.98806774 +0000 UTC m=+155.135845421" lastFinishedPulling="2025-09-29 16:55:07.021727049 +0000 UTC m=+237.169504730" observedRunningTime="2025-09-29 16:55:08.073909641 +0000 UTC m=+238.221687322" watchObservedRunningTime="2025-09-29 16:55:08.075886504 +0000 UTC m=+238.223664185" Sep 29 16:55:08 crc kubenswrapper[4592]: I0929 16:55:08.101247 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hdhfp" podStartSLOduration=5.39203061 podStartE2EDuration="1m27.101228346s" podCreationTimestamp="2025-09-29 16:53:41 +0000 UTC" firstStartedPulling="2025-09-29 16:53:44.9718859 +0000 UTC m=+155.119663581" lastFinishedPulling="2025-09-29 16:55:06.681083636 +0000 UTC m=+236.828861317" observedRunningTime="2025-09-29 16:55:08.099238173 +0000 UTC m=+238.247015864" watchObservedRunningTime="2025-09-29 16:55:08.101228346 +0000 UTC m=+238.249006027" Sep 29 16:55:08 crc kubenswrapper[4592]: I0929 16:55:08.123818 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-r9mwk" podStartSLOduration=4.257959445 podStartE2EDuration="1m25.123799911s" podCreationTimestamp="2025-09-29 16:53:43 +0000 UTC" firstStartedPulling="2025-09-29 16:53:46.126163703 +0000 UTC m=+156.273941384" lastFinishedPulling="2025-09-29 16:55:06.992004169 +0000 UTC m=+237.139781850" observedRunningTime="2025-09-29 16:55:08.121824668 +0000 UTC m=+238.269602349" watchObservedRunningTime="2025-09-29 16:55:08.123799911 +0000 UTC m=+238.271577592" Sep 29 16:55:08 crc kubenswrapper[4592]: I0929 16:55:08.162435 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-k498t" podStartSLOduration=4.312452241 
podStartE2EDuration="1m26.162417584s" podCreationTimestamp="2025-09-29 16:53:42 +0000 UTC" firstStartedPulling="2025-09-29 16:53:45.007974181 +0000 UTC m=+155.155751862" lastFinishedPulling="2025-09-29 16:55:06.857939524 +0000 UTC m=+237.005717205" observedRunningTime="2025-09-29 16:55:08.160994438 +0000 UTC m=+238.308772119" watchObservedRunningTime="2025-09-29 16:55:08.162417584 +0000 UTC m=+238.310195265" Sep 29 16:55:09 crc kubenswrapper[4592]: I0929 16:55:09.038603 4592 generic.go:334] "Generic (PLEG): container finished" podID="4829df1e-952f-4db8-b3ee-218715359da1" containerID="fdf4febfa7376714764ae309df0c4eb7037f7a2ee6d8837c4d5d468d8d16ccc8" exitCode=0 Sep 29 16:55:09 crc kubenswrapper[4592]: I0929 16:55:09.038645 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhcpn" event={"ID":"4829df1e-952f-4db8-b3ee-218715359da1","Type":"ContainerDied","Data":"fdf4febfa7376714764ae309df0c4eb7037f7a2ee6d8837c4d5d468d8d16ccc8"} Sep 29 16:55:10 crc kubenswrapper[4592]: I0929 16:55:10.045107 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhcpn" event={"ID":"4829df1e-952f-4db8-b3ee-218715359da1","Type":"ContainerStarted","Data":"a18123bd0992c081f928fae7140b75a5003d52caa533cb6b9ca3468710d06b92"} Sep 29 16:55:10 crc kubenswrapper[4592]: I0929 16:55:10.046978 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lb49j" event={"ID":"e493af7b-bc6d-4f70-9c4d-65a4b52503fa","Type":"ContainerStarted","Data":"133adfa8cbfc46894dedb9cec84700bc563ea862b1d9d0a4ebf8067374d9fcf9"} Sep 29 16:55:10 crc kubenswrapper[4592]: I0929 16:55:10.071089 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nhcpn" podStartSLOduration=2.8574500069999997 podStartE2EDuration="1m25.07106931s" podCreationTimestamp="2025-09-29 16:53:45 +0000 UTC" firstStartedPulling="2025-09-29 16:53:47.284372523 +0000 UTC m=+157.432150204" lastFinishedPulling="2025-09-29 16:55:09.497991836 +0000 UTC m=+239.645769507" observedRunningTime="2025-09-29 16:55:10.067483687 +0000 UTC m=+240.215261398" watchObservedRunningTime="2025-09-29 16:55:10.07106931 +0000 UTC m=+240.218846991" Sep 29 16:55:10 crc kubenswrapper[4592]: I0929 16:55:10.099621 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lb49j" podStartSLOduration=3.147206793 podStartE2EDuration="1m26.099602843s" podCreationTimestamp="2025-09-29 16:53:44 +0000 UTC" firstStartedPulling="2025-09-29 16:53:46.21499453 +0000 UTC m=+156.362772211" lastFinishedPulling="2025-09-29 16:55:09.16739058 +0000 UTC m=+239.315168261" observedRunningTime="2025-09-29 16:55:10.09919305 +0000 UTC m=+240.246970741" watchObservedRunningTime="2025-09-29 16:55:10.099602843 +0000 UTC m=+240.247380524" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.004047 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hdhfp" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.005300 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hdhfp" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.235258 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hdhfp" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.286618 4592 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2tf2t" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.286693 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2tf2t" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.289696 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hdhfp" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.334654 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2tf2t" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.616549 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-k498t" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.616830 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-k498t" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.666221 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-k498t" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.828753 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-tqwzq" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.828800 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-tqwzq" Sep 29 16:55:12 crc kubenswrapper[4592]: I0929 16:55:12.864265 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-tqwzq" Sep 29 16:55:13 crc kubenswrapper[4592]: I0929 16:55:13.131486 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-tqwzq" Sep 29 16:55:13 crc kubenswrapper[4592]: I0929 16:55:13.134687 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2tf2t" Sep 29 16:55:13 crc kubenswrapper[4592]: I0929 16:55:13.143676 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-k498t" Sep 29 16:55:14 crc kubenswrapper[4592]: I0929 16:55:14.280313 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:55:14 crc kubenswrapper[4592]: I0929 16:55:14.280367 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:55:14 crc kubenswrapper[4592]: I0929 16:55:14.331286 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:55:14 crc kubenswrapper[4592]: I0929 16:55:14.750324 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:55:14 crc kubenswrapper[4592]: I0929 16:55:14.750694 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:55:14 crc kubenswrapper[4592]: I0929 16:55:14.808180 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:55:15 crc 
kubenswrapper[4592]: I0929 16:55:15.124673 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.143870 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lb49j" Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.440335 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2tf2t"] Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.440934 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2tf2t" podUID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" containerName="registry-server" containerID="cri-o://2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78" gracePeriod=2 Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.572299 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.572357 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.617213 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.863046 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2tf2t" Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.917552 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.917821 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.948836 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-catalog-content\") pod \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\" (UID: \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\") " Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.948920 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-utilities\") pod \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\" (UID: \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\") " Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.949008 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvgtj\" (UniqueName: \"kubernetes.io/projected/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-kube-api-access-gvgtj\") pod \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\" (UID: \"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852\") " Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.949960 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-utilities" (OuterVolumeSpecName: "utilities") pod "6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" (UID: "6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.955201 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-kube-api-access-gvgtj" (OuterVolumeSpecName: "kube-api-access-gvgtj") pod "6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" (UID: "6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852"). InnerVolumeSpecName "kube-api-access-gvgtj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.959516 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:55:15 crc kubenswrapper[4592]: I0929 16:55:15.993731 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" (UID: "6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.050692 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvgtj\" (UniqueName: \"kubernetes.io/projected/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-kube-api-access-gvgtj\") on node \"crc\" DevicePath \"\"" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.050728 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.050740 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.074971 4592 generic.go:334] "Generic (PLEG): container finished" podID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" containerID="2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78" exitCode=0 Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.075041 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2tf2t" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.075039 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2tf2t" event={"ID":"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852","Type":"ContainerDied","Data":"2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78"} Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.075099 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2tf2t" event={"ID":"6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852","Type":"ContainerDied","Data":"9d01575d567b4c02fd46d97a2719ae84467675bc179bd3adcf8bbc6a2b4ac64a"} Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.075119 4592 scope.go:117] "RemoveContainer" containerID="2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.098675 4592 scope.go:117] "RemoveContainer" containerID="2fb741c26b3696b67b621a3b33da1bae7a86efc79a768750516d291953f603bf" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.113195 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2tf2t"] Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.116772 4592 scope.go:117] "RemoveContainer" containerID="e9b72961752d5011534403f56204356730ca2525fab095f54f3511e6f8013685" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.118902 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.122192 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2tf2t"] Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.132916 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.135334 4592 scope.go:117] "RemoveContainer" containerID="2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78" Sep 29 16:55:16 crc kubenswrapper[4592]: E0929 16:55:16.135681 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78\": container with ID starting with 2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78 not found: ID does not exist" containerID="2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.135714 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78"} err="failed to get container status \"2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78\": rpc error: code = NotFound desc = could not find container \"2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78\": container with ID starting with 2554adad78c1b121b462931f901252acd4d2c6d88af283da6e9ef8416677bd78 not found: ID does not exist" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.135733 4592 scope.go:117] "RemoveContainer" containerID="2fb741c26b3696b67b621a3b33da1bae7a86efc79a768750516d291953f603bf" Sep 29 16:55:16 crc kubenswrapper[4592]: E0929 16:55:16.136082 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound 
desc = could not find container \"2fb741c26b3696b67b621a3b33da1bae7a86efc79a768750516d291953f603bf\": container with ID starting with 2fb741c26b3696b67b621a3b33da1bae7a86efc79a768750516d291953f603bf not found: ID does not exist" containerID="2fb741c26b3696b67b621a3b33da1bae7a86efc79a768750516d291953f603bf" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.136100 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fb741c26b3696b67b621a3b33da1bae7a86efc79a768750516d291953f603bf"} err="failed to get container status \"2fb741c26b3696b67b621a3b33da1bae7a86efc79a768750516d291953f603bf\": rpc error: code = NotFound desc = could not find container \"2fb741c26b3696b67b621a3b33da1bae7a86efc79a768750516d291953f603bf\": container with ID starting with 2fb741c26b3696b67b621a3b33da1bae7a86efc79a768750516d291953f603bf not found: ID does not exist" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.136113 4592 scope.go:117] "RemoveContainer" containerID="e9b72961752d5011534403f56204356730ca2525fab095f54f3511e6f8013685" Sep 29 16:55:16 crc kubenswrapper[4592]: E0929 16:55:16.136370 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9b72961752d5011534403f56204356730ca2525fab095f54f3511e6f8013685\": container with ID starting with e9b72961752d5011534403f56204356730ca2525fab095f54f3511e6f8013685 not found: ID does not exist" containerID="e9b72961752d5011534403f56204356730ca2525fab095f54f3511e6f8013685" Sep 29 16:55:16 crc kubenswrapper[4592]: I0929 16:55:16.136466 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9b72961752d5011534403f56204356730ca2525fab095f54f3511e6f8013685"} err="failed to get container status \"e9b72961752d5011534403f56204356730ca2525fab095f54f3511e6f8013685\": rpc error: code = NotFound desc = could not find container \"e9b72961752d5011534403f56204356730ca2525fab095f54f3511e6f8013685\": container with ID starting with e9b72961752d5011534403f56204356730ca2525fab095f54f3511e6f8013685 not found: ID does not exist" Sep 29 16:55:17 crc kubenswrapper[4592]: I0929 16:55:17.192652 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" path="/var/lib/kubelet/pods/6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852/volumes" Sep 29 16:55:17 crc kubenswrapper[4592]: I0929 16:55:17.240247 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tqwzq"] Sep 29 16:55:17 crc kubenswrapper[4592]: I0929 16:55:17.240548 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-tqwzq" podUID="81890027-503c-4d1d-94c7-5ce8bdbef726" containerName="registry-server" containerID="cri-o://e618f887cd7c2384ebc07fe9c6c6a6d1d6064ae2b73186d4e23496793660e1c9" gracePeriod=2 Sep 29 16:55:17 crc kubenswrapper[4592]: I0929 16:55:17.839964 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lb49j"] Sep 29 16:55:17 crc kubenswrapper[4592]: I0929 16:55:17.840445 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lb49j" podUID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" containerName="registry-server" containerID="cri-o://133adfa8cbfc46894dedb9cec84700bc563ea862b1d9d0a4ebf8067374d9fcf9" gracePeriod=2 Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.097512 4592 
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.097634 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqwzq" event={"ID":"81890027-503c-4d1d-94c7-5ce8bdbef726","Type":"ContainerDied","Data":"e618f887cd7c2384ebc07fe9c6c6a6d1d6064ae2b73186d4e23496793660e1c9"}
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.100304 4592 generic.go:334] "Generic (PLEG): container finished" podID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" containerID="133adfa8cbfc46894dedb9cec84700bc563ea862b1d9d0a4ebf8067374d9fcf9" exitCode=0
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.100395 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lb49j" event={"ID":"e493af7b-bc6d-4f70-9c4d-65a4b52503fa","Type":"ContainerDied","Data":"133adfa8cbfc46894dedb9cec84700bc563ea862b1d9d0a4ebf8067374d9fcf9"}
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.141134 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tqwzq"
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.213708 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lb49j"
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.268294 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81890027-503c-4d1d-94c7-5ce8bdbef726-utilities\") pod \"81890027-503c-4d1d-94c7-5ce8bdbef726\" (UID: \"81890027-503c-4d1d-94c7-5ce8bdbef726\") "
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.268416 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mn6zr\" (UniqueName: \"kubernetes.io/projected/81890027-503c-4d1d-94c7-5ce8bdbef726-kube-api-access-mn6zr\") pod \"81890027-503c-4d1d-94c7-5ce8bdbef726\" (UID: \"81890027-503c-4d1d-94c7-5ce8bdbef726\") "
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.268460 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81890027-503c-4d1d-94c7-5ce8bdbef726-catalog-content\") pod \"81890027-503c-4d1d-94c7-5ce8bdbef726\" (UID: \"81890027-503c-4d1d-94c7-5ce8bdbef726\") "
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.269209 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81890027-503c-4d1d-94c7-5ce8bdbef726-utilities" (OuterVolumeSpecName: "utilities") pod "81890027-503c-4d1d-94c7-5ce8bdbef726" (UID: "81890027-503c-4d1d-94c7-5ce8bdbef726"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.282518 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81890027-503c-4d1d-94c7-5ce8bdbef726-kube-api-access-mn6zr" (OuterVolumeSpecName: "kube-api-access-mn6zr") pod "81890027-503c-4d1d-94c7-5ce8bdbef726" (UID: "81890027-503c-4d1d-94c7-5ce8bdbef726"). InnerVolumeSpecName "kube-api-access-mn6zr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.314220 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81890027-503c-4d1d-94c7-5ce8bdbef726-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81890027-503c-4d1d-94c7-5ce8bdbef726" (UID: "81890027-503c-4d1d-94c7-5ce8bdbef726"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.369838 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-utilities\") pod \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\" (UID: \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\") "
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.369909 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-catalog-content\") pod \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\" (UID: \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\") "
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.370005 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wfxn\" (UniqueName: \"kubernetes.io/projected/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-kube-api-access-4wfxn\") pod \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\" (UID: \"e493af7b-bc6d-4f70-9c4d-65a4b52503fa\") "
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.370213 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81890027-503c-4d1d-94c7-5ce8bdbef726-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.370232 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mn6zr\" (UniqueName: \"kubernetes.io/projected/81890027-503c-4d1d-94c7-5ce8bdbef726-kube-api-access-mn6zr\") on node \"crc\" DevicePath \"\""
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.370242 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81890027-503c-4d1d-94c7-5ce8bdbef726-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.371074 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-utilities" (OuterVolumeSpecName: "utilities") pod "e493af7b-bc6d-4f70-9c4d-65a4b52503fa" (UID: "e493af7b-bc6d-4f70-9c4d-65a4b52503fa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.373496 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-kube-api-access-4wfxn" (OuterVolumeSpecName: "kube-api-access-4wfxn") pod "e493af7b-bc6d-4f70-9c4d-65a4b52503fa" (UID: "e493af7b-bc6d-4f70-9c4d-65a4b52503fa"). InnerVolumeSpecName "kube-api-access-4wfxn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.382613 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e493af7b-bc6d-4f70-9c4d-65a4b52503fa" (UID: "e493af7b-bc6d-4f70-9c4d-65a4b52503fa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.471812 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wfxn\" (UniqueName: \"kubernetes.io/projected/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-kube-api-access-4wfxn\") on node \"crc\" DevicePath \"\""
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.471847 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 16:55:18 crc kubenswrapper[4592]: I0929 16:55:18.471860 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e493af7b-bc6d-4f70-9c4d-65a4b52503fa-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.106926 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lb49j" event={"ID":"e493af7b-bc6d-4f70-9c4d-65a4b52503fa","Type":"ContainerDied","Data":"7d654e55ca74ca82f08d5277b9a5a2c39b65b5159d3e31d8a2976a001da87114"}
Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.106983 4592 scope.go:117] "RemoveContainer" containerID="133adfa8cbfc46894dedb9cec84700bc563ea862b1d9d0a4ebf8067374d9fcf9"
Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.106939 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lb49j"
Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.109087 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-tqwzq" event={"ID":"81890027-503c-4d1d-94c7-5ce8bdbef726","Type":"ContainerDied","Data":"320fa111145dc4d47a15d4571bf82eda2d5d4485dfdd99d7ef9591ceea3e2f5f"}
Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.109239 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-tqwzq"
Need to start a new one" pod="openshift-marketplace/community-operators-tqwzq" Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.126018 4592 scope.go:117] "RemoveContainer" containerID="2e73af0094a9ef41bb3ad8ab700460890cdf4c5f643ae68e151eea814b8a6739" Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.143220 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lb49j"] Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.145756 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lb49j"] Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.169658 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-tqwzq"] Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.173917 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-tqwzq"] Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.174315 4592 scope.go:117] "RemoveContainer" containerID="8caac1d9ebb8445ec641dc0c77ff146ad6d80561b328cce8ce1f63d65103f283" Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.189334 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81890027-503c-4d1d-94c7-5ce8bdbef726" path="/var/lib/kubelet/pods/81890027-503c-4d1d-94c7-5ce8bdbef726/volumes" Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.189952 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" path="/var/lib/kubelet/pods/e493af7b-bc6d-4f70-9c4d-65a4b52503fa/volumes" Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.190740 4592 scope.go:117] "RemoveContainer" containerID="e618f887cd7c2384ebc07fe9c6c6a6d1d6064ae2b73186d4e23496793660e1c9" Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.209463 4592 scope.go:117] "RemoveContainer" containerID="ea7be04f532b98ea091f0651763ae5684e6694f9dd2ec3ecd243608f1363f3b2" Sep 29 16:55:19 crc kubenswrapper[4592]: I0929 16:55:19.222759 4592 scope.go:117] "RemoveContainer" containerID="5f10f47d6c628b3cb26d461feb568636c7cbf8777c11655834f6a87c83eeff63" Sep 29 16:55:20 crc kubenswrapper[4592]: I0929 16:55:20.240107 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nhcpn"] Sep 29 16:55:20 crc kubenswrapper[4592]: I0929 16:55:20.240471 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nhcpn" podUID="4829df1e-952f-4db8-b3ee-218715359da1" containerName="registry-server" containerID="cri-o://a18123bd0992c081f928fae7140b75a5003d52caa533cb6b9ca3468710d06b92" gracePeriod=2 Sep 29 16:55:22 crc kubenswrapper[4592]: I0929 16:55:22.130770 4592 generic.go:334] "Generic (PLEG): container finished" podID="4829df1e-952f-4db8-b3ee-218715359da1" containerID="a18123bd0992c081f928fae7140b75a5003d52caa533cb6b9ca3468710d06b92" exitCode=0 Sep 29 16:55:22 crc kubenswrapper[4592]: I0929 16:55:22.130956 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhcpn" event={"ID":"4829df1e-952f-4db8-b3ee-218715359da1","Type":"ContainerDied","Data":"a18123bd0992c081f928fae7140b75a5003d52caa533cb6b9ca3468710d06b92"} Sep 29 16:55:22 crc kubenswrapper[4592]: I0929 16:55:22.190589 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:55:22 crc kubenswrapper[4592]: I0929 16:55:22.247712 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4829df1e-952f-4db8-b3ee-218715359da1-catalog-content\") pod \"4829df1e-952f-4db8-b3ee-218715359da1\" (UID: \"4829df1e-952f-4db8-b3ee-218715359da1\") " Sep 29 16:55:22 crc kubenswrapper[4592]: I0929 16:55:22.247826 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4829df1e-952f-4db8-b3ee-218715359da1-utilities\") pod \"4829df1e-952f-4db8-b3ee-218715359da1\" (UID: \"4829df1e-952f-4db8-b3ee-218715359da1\") " Sep 29 16:55:22 crc kubenswrapper[4592]: I0929 16:55:22.247879 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rc4mg\" (UniqueName: \"kubernetes.io/projected/4829df1e-952f-4db8-b3ee-218715359da1-kube-api-access-rc4mg\") pod \"4829df1e-952f-4db8-b3ee-218715359da1\" (UID: \"4829df1e-952f-4db8-b3ee-218715359da1\") " Sep 29 16:55:22 crc kubenswrapper[4592]: I0929 16:55:22.248874 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4829df1e-952f-4db8-b3ee-218715359da1-utilities" (OuterVolumeSpecName: "utilities") pod "4829df1e-952f-4db8-b3ee-218715359da1" (UID: "4829df1e-952f-4db8-b3ee-218715359da1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:55:22 crc kubenswrapper[4592]: I0929 16:55:22.254274 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4829df1e-952f-4db8-b3ee-218715359da1-kube-api-access-rc4mg" (OuterVolumeSpecName: "kube-api-access-rc4mg") pod "4829df1e-952f-4db8-b3ee-218715359da1" (UID: "4829df1e-952f-4db8-b3ee-218715359da1"). InnerVolumeSpecName "kube-api-access-rc4mg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:55:22 crc kubenswrapper[4592]: I0929 16:55:22.333611 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4829df1e-952f-4db8-b3ee-218715359da1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4829df1e-952f-4db8-b3ee-218715359da1" (UID: "4829df1e-952f-4db8-b3ee-218715359da1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:55:22 crc kubenswrapper[4592]: I0929 16:55:22.348798 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4829df1e-952f-4db8-b3ee-218715359da1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 16:55:22 crc kubenswrapper[4592]: I0929 16:55:22.348829 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4829df1e-952f-4db8-b3ee-218715359da1-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 16:55:22 crc kubenswrapper[4592]: I0929 16:55:22.348842 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rc4mg\" (UniqueName: \"kubernetes.io/projected/4829df1e-952f-4db8-b3ee-218715359da1-kube-api-access-rc4mg\") on node \"crc\" DevicePath \"\"" Sep 29 16:55:23 crc kubenswrapper[4592]: I0929 16:55:23.137752 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nhcpn" event={"ID":"4829df1e-952f-4db8-b3ee-218715359da1","Type":"ContainerDied","Data":"1ca33f9bd8fa7a4137c4abed51a7fa17ef404091cc0b5df886b07f880eff2d2c"} Sep 29 16:55:23 crc kubenswrapper[4592]: I0929 16:55:23.138073 4592 scope.go:117] "RemoveContainer" containerID="a18123bd0992c081f928fae7140b75a5003d52caa533cb6b9ca3468710d06b92" Sep 29 16:55:23 crc kubenswrapper[4592]: I0929 16:55:23.137899 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nhcpn" Sep 29 16:55:23 crc kubenswrapper[4592]: I0929 16:55:23.156932 4592 scope.go:117] "RemoveContainer" containerID="fdf4febfa7376714764ae309df0c4eb7037f7a2ee6d8837c4d5d468d8d16ccc8" Sep 29 16:55:23 crc kubenswrapper[4592]: I0929 16:55:23.171668 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nhcpn"] Sep 29 16:55:23 crc kubenswrapper[4592]: I0929 16:55:23.175937 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nhcpn"] Sep 29 16:55:23 crc kubenswrapper[4592]: I0929 16:55:23.188797 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4829df1e-952f-4db8-b3ee-218715359da1" path="/var/lib/kubelet/pods/4829df1e-952f-4db8-b3ee-218715359da1/volumes" Sep 29 16:55:23 crc kubenswrapper[4592]: I0929 16:55:23.195527 4592 scope.go:117] "RemoveContainer" containerID="5868b4369e29f8d5f77bf2b7c2a1d7df4e2354c8e0e6a5fecb50408ea82fd439" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.648740 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hdhfp"] Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.649444 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hdhfp" podUID="c0ffea6d-0977-4552-961e-fc318ff7db95" containerName="registry-server" containerID="cri-o://fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a" gracePeriod=30 Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.665592 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k498t"] Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.667165 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-k498t" podUID="836078bb-4975-4487-9404-6a3e4348292b" containerName="registry-server" 
containerID="cri-o://41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926" gracePeriod=30 Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.673542 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hwj5f"] Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.673780 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" podUID="542d754d-bd15-40b7-8208-876f318413a9" containerName="marketplace-operator" containerID="cri-o://7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5" gracePeriod=30 Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.684780 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r9mwk"] Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.685018 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-r9mwk" podUID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" containerName="registry-server" containerID="cri-o://54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3" gracePeriod=30 Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.693985 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hq85k"] Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694261 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81890027-503c-4d1d-94c7-5ce8bdbef726" containerName="extract-content" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694278 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="81890027-503c-4d1d-94c7-5ce8bdbef726" containerName="extract-content" Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694296 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81890027-503c-4d1d-94c7-5ce8bdbef726" containerName="registry-server" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694308 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="81890027-503c-4d1d-94c7-5ce8bdbef726" containerName="registry-server" Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694327 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" containerName="extract-utilities" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694335 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" containerName="extract-utilities" Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694346 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" containerName="extract-utilities" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694354 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" containerName="extract-utilities" Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694363 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" containerName="registry-server" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694373 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" containerName="registry-server" Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694384 4592 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4829df1e-952f-4db8-b3ee-218715359da1" containerName="registry-server" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694395 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4829df1e-952f-4db8-b3ee-218715359da1" containerName="registry-server" Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694406 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" containerName="extract-content" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694415 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" containerName="extract-content" Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694426 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4829df1e-952f-4db8-b3ee-218715359da1" containerName="extract-content" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694436 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4829df1e-952f-4db8-b3ee-218715359da1" containerName="extract-content" Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694452 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81890027-503c-4d1d-94c7-5ce8bdbef726" containerName="extract-utilities" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694462 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="81890027-503c-4d1d-94c7-5ce8bdbef726" containerName="extract-utilities" Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694476 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" containerName="extract-content" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694483 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" containerName="extract-content" Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694495 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4829df1e-952f-4db8-b3ee-218715359da1" containerName="extract-utilities" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694502 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4829df1e-952f-4db8-b3ee-218715359da1" containerName="extract-utilities" Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694513 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" containerName="registry-server" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694521 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" containerName="registry-server" Sep 29 16:56:14 crc kubenswrapper[4592]: E0929 16:56:14.694535 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e581d2d-b885-4469-b791-f468419d0b66" containerName="pruner" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694542 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e581d2d-b885-4469-b791-f468419d0b66" containerName="pruner" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694662 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a905fc1-b5a4-4e2d-9e8f-3e6811b9b852" containerName="registry-server" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694677 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="81890027-503c-4d1d-94c7-5ce8bdbef726" containerName="registry-server" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694686 4592 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="9e581d2d-b885-4469-b791-f468419d0b66" containerName="pruner" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694695 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="e493af7b-bc6d-4f70-9c4d-65a4b52503fa" containerName="registry-server" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.694709 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="4829df1e-952f-4db8-b3ee-218715359da1" containerName="registry-server" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.695176 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.700191 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4c6m9"] Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.700406 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4c6m9" podUID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" containerName="registry-server" containerID="cri-o://3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90" gracePeriod=30 Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.711855 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hq85k"] Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.858943 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9110599-7f42-4970-93fa-89f37c84fad3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hq85k\" (UID: \"d9110599-7f42-4970-93fa-89f37c84fad3\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.859289 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hrwt\" (UniqueName: \"kubernetes.io/projected/d9110599-7f42-4970-93fa-89f37c84fad3-kube-api-access-9hrwt\") pod \"marketplace-operator-79b997595-hq85k\" (UID: \"d9110599-7f42-4970-93fa-89f37c84fad3\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.859338 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d9110599-7f42-4970-93fa-89f37c84fad3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hq85k\" (UID: \"d9110599-7f42-4970-93fa-89f37c84fad3\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.960028 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9110599-7f42-4970-93fa-89f37c84fad3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hq85k\" (UID: \"d9110599-7f42-4970-93fa-89f37c84fad3\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.960090 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hrwt\" (UniqueName: \"kubernetes.io/projected/d9110599-7f42-4970-93fa-89f37c84fad3-kube-api-access-9hrwt\") pod \"marketplace-operator-79b997595-hq85k\" (UID: 
\"d9110599-7f42-4970-93fa-89f37c84fad3\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.960166 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d9110599-7f42-4970-93fa-89f37c84fad3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hq85k\" (UID: \"d9110599-7f42-4970-93fa-89f37c84fad3\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.963206 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d9110599-7f42-4970-93fa-89f37c84fad3-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-hq85k\" (UID: \"d9110599-7f42-4970-93fa-89f37c84fad3\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.979245 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/d9110599-7f42-4970-93fa-89f37c84fad3-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-hq85k\" (UID: \"d9110599-7f42-4970-93fa-89f37c84fad3\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:14 crc kubenswrapper[4592]: I0929 16:56:14.990519 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hrwt\" (UniqueName: \"kubernetes.io/projected/d9110599-7f42-4970-93fa-89f37c84fad3-kube-api-access-9hrwt\") pod \"marketplace-operator-79b997595-hq85k\" (UID: \"d9110599-7f42-4970-93fa-89f37c84fad3\") " pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.015744 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.056572 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hdhfp" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.167724 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0ffea6d-0977-4552-961e-fc318ff7db95-catalog-content\") pod \"c0ffea6d-0977-4552-961e-fc318ff7db95\" (UID: \"c0ffea6d-0977-4552-961e-fc318ff7db95\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.167840 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0ffea6d-0977-4552-961e-fc318ff7db95-utilities\") pod \"c0ffea6d-0977-4552-961e-fc318ff7db95\" (UID: \"c0ffea6d-0977-4552-961e-fc318ff7db95\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.167893 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdslw\" (UniqueName: \"kubernetes.io/projected/c0ffea6d-0977-4552-961e-fc318ff7db95-kube-api-access-zdslw\") pod \"c0ffea6d-0977-4552-961e-fc318ff7db95\" (UID: \"c0ffea6d-0977-4552-961e-fc318ff7db95\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.171035 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0ffea6d-0977-4552-961e-fc318ff7db95-utilities" (OuterVolumeSpecName: "utilities") pod "c0ffea6d-0977-4552-961e-fc318ff7db95" (UID: "c0ffea6d-0977-4552-961e-fc318ff7db95"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.175456 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k498t" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.183525 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.197479 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0ffea6d-0977-4552-961e-fc318ff7db95-kube-api-access-zdslw" (OuterVolumeSpecName: "kube-api-access-zdslw") pod "c0ffea6d-0977-4552-961e-fc318ff7db95" (UID: "c0ffea6d-0977-4552-961e-fc318ff7db95"). InnerVolumeSpecName "kube-api-access-zdslw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.272804 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0ffea6d-0977-4552-961e-fc318ff7db95-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.272844 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdslw\" (UniqueName: \"kubernetes.io/projected/c0ffea6d-0977-4552-961e-fc318ff7db95-kube-api-access-zdslw\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.283396 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0ffea6d-0977-4552-961e-fc318ff7db95-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0ffea6d-0977-4552-961e-fc318ff7db95" (UID: "c0ffea6d-0977-4552-961e-fc318ff7db95"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.358216 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.374248 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/836078bb-4975-4487-9404-6a3e4348292b-utilities\") pod \"836078bb-4975-4487-9404-6a3e4348292b\" (UID: \"836078bb-4975-4487-9404-6a3e4348292b\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.374381 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/542d754d-bd15-40b7-8208-876f318413a9-marketplace-trusted-ca\") pod \"542d754d-bd15-40b7-8208-876f318413a9\" (UID: \"542d754d-bd15-40b7-8208-876f318413a9\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.374421 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flqvh\" (UniqueName: \"kubernetes.io/projected/542d754d-bd15-40b7-8208-876f318413a9-kube-api-access-flqvh\") pod \"542d754d-bd15-40b7-8208-876f318413a9\" (UID: \"542d754d-bd15-40b7-8208-876f318413a9\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.374447 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/542d754d-bd15-40b7-8208-876f318413a9-marketplace-operator-metrics\") pod \"542d754d-bd15-40b7-8208-876f318413a9\" (UID: \"542d754d-bd15-40b7-8208-876f318413a9\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.374477 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nt2p4\" (UniqueName: \"kubernetes.io/projected/836078bb-4975-4487-9404-6a3e4348292b-kube-api-access-nt2p4\") pod \"836078bb-4975-4487-9404-6a3e4348292b\" (UID: \"836078bb-4975-4487-9404-6a3e4348292b\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.374504 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/836078bb-4975-4487-9404-6a3e4348292b-catalog-content\") pod \"836078bb-4975-4487-9404-6a3e4348292b\" (UID: \"836078bb-4975-4487-9404-6a3e4348292b\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.374823 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0ffea6d-0977-4552-961e-fc318ff7db95-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.377233 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/836078bb-4975-4487-9404-6a3e4348292b-utilities" (OuterVolumeSpecName: "utilities") pod "836078bb-4975-4487-9404-6a3e4348292b" (UID: "836078bb-4975-4487-9404-6a3e4348292b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.378257 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/542d754d-bd15-40b7-8208-876f318413a9-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "542d754d-bd15-40b7-8208-876f318413a9" (UID: "542d754d-bd15-40b7-8208-876f318413a9"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.386732 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/542d754d-bd15-40b7-8208-876f318413a9-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "542d754d-bd15-40b7-8208-876f318413a9" (UID: "542d754d-bd15-40b7-8208-876f318413a9"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.396771 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/542d754d-bd15-40b7-8208-876f318413a9-kube-api-access-flqvh" (OuterVolumeSpecName: "kube-api-access-flqvh") pod "542d754d-bd15-40b7-8208-876f318413a9" (UID: "542d754d-bd15-40b7-8208-876f318413a9"). InnerVolumeSpecName "kube-api-access-flqvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.405178 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/836078bb-4975-4487-9404-6a3e4348292b-kube-api-access-nt2p4" (OuterVolumeSpecName: "kube-api-access-nt2p4") pod "836078bb-4975-4487-9404-6a3e4348292b" (UID: "836078bb-4975-4487-9404-6a3e4348292b"). InnerVolumeSpecName "kube-api-access-nt2p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.428382 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.428704 4592 generic.go:334] "Generic (PLEG): container finished" podID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" containerID="54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3" exitCode=0 Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.428778 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r9mwk" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.429022 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9mwk" event={"ID":"c9b6b21e-615b-458b-ae60-5e8535dea0c1","Type":"ContainerDied","Data":"54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3"} Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.429046 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9mwk" event={"ID":"c9b6b21e-615b-458b-ae60-5e8535dea0c1","Type":"ContainerDied","Data":"d3c53260f2112428596b79ff36eb3c564c9946ba837984dbe0b032123fbc4579"} Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.429061 4592 scope.go:117] "RemoveContainer" containerID="54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.430907 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hq85k"] Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.438942 4592 generic.go:334] "Generic (PLEG): container finished" podID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" containerID="3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90" exitCode=0 Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.439204 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4c6m9" event={"ID":"9611b65a-9551-46c6-a4ae-70c8d6eb0b54","Type":"ContainerDied","Data":"3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90"} Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.439285 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4c6m9" event={"ID":"9611b65a-9551-46c6-a4ae-70c8d6eb0b54","Type":"ContainerDied","Data":"a2aa740d209e198a94589d8c0a023dcf22b1308676175bd9eac2f368779ed80c"} Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.439357 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4c6m9" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.449603 4592 generic.go:334] "Generic (PLEG): container finished" podID="836078bb-4975-4487-9404-6a3e4348292b" containerID="41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926" exitCode=0 Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.450373 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k498t" event={"ID":"836078bb-4975-4487-9404-6a3e4348292b","Type":"ContainerDied","Data":"41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926"} Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.450428 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k498t" event={"ID":"836078bb-4975-4487-9404-6a3e4348292b","Type":"ContainerDied","Data":"d3f95a6223ac1a3a9dee96cdc54e839ff77e6bed332432e0388243dd595e2268"} Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.450509 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-k498t" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.462751 4592 scope.go:117] "RemoveContainer" containerID="5ecbe7d8c0e8df5ef2bc2ec7470513c2b43c531d88750842e86bf3fc0f040f92" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.472766 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" event={"ID":"542d754d-bd15-40b7-8208-876f318413a9","Type":"ContainerDied","Data":"7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5"} Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.472849 4592 generic.go:334] "Generic (PLEG): container finished" podID="542d754d-bd15-40b7-8208-876f318413a9" containerID="7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5" exitCode=0 Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.472897 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.472923 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hwj5f" event={"ID":"542d754d-bd15-40b7-8208-876f318413a9","Type":"ContainerDied","Data":"6a0fb8fb40764ac1c80a5d1ef803e7b918a1ded96027232a3391ac6a398a1b69"} Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.487550 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9b6b21e-615b-458b-ae60-5e8535dea0c1-utilities\") pod \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\" (UID: \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.487641 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9b6b21e-615b-458b-ae60-5e8535dea0c1-catalog-content\") pod \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\" (UID: \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.487746 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97mr6\" (UniqueName: \"kubernetes.io/projected/c9b6b21e-615b-458b-ae60-5e8535dea0c1-kube-api-access-97mr6\") pod \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\" (UID: \"c9b6b21e-615b-458b-ae60-5e8535dea0c1\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.487985 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/836078bb-4975-4487-9404-6a3e4348292b-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.488001 4592 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/542d754d-bd15-40b7-8208-876f318413a9-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.488013 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flqvh\" (UniqueName: \"kubernetes.io/projected/542d754d-bd15-40b7-8208-876f318413a9-kube-api-access-flqvh\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.488024 4592 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/542d754d-bd15-40b7-8208-876f318413a9-marketplace-operator-metrics\") on node 
\"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.488036 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nt2p4\" (UniqueName: \"kubernetes.io/projected/836078bb-4975-4487-9404-6a3e4348292b-kube-api-access-nt2p4\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.489210 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9b6b21e-615b-458b-ae60-5e8535dea0c1-utilities" (OuterVolumeSpecName: "utilities") pod "c9b6b21e-615b-458b-ae60-5e8535dea0c1" (UID: "c9b6b21e-615b-458b-ae60-5e8535dea0c1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.492945 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9b6b21e-615b-458b-ae60-5e8535dea0c1-kube-api-access-97mr6" (OuterVolumeSpecName: "kube-api-access-97mr6") pod "c9b6b21e-615b-458b-ae60-5e8535dea0c1" (UID: "c9b6b21e-615b-458b-ae60-5e8535dea0c1"). InnerVolumeSpecName "kube-api-access-97mr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.494060 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/836078bb-4975-4487-9404-6a3e4348292b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "836078bb-4975-4487-9404-6a3e4348292b" (UID: "836078bb-4975-4487-9404-6a3e4348292b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.496522 4592 generic.go:334] "Generic (PLEG): container finished" podID="c0ffea6d-0977-4552-961e-fc318ff7db95" containerID="fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a" exitCode=0 Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.496563 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hdhfp" event={"ID":"c0ffea6d-0977-4552-961e-fc318ff7db95","Type":"ContainerDied","Data":"fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a"} Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.496612 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hdhfp" event={"ID":"c0ffea6d-0977-4552-961e-fc318ff7db95","Type":"ContainerDied","Data":"6b465fe3056d2a5cecc4b2f79ff52e8bdec3bfb1fc9e7851c6385ee295c88965"} Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.496627 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hdhfp" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.505421 4592 scope.go:117] "RemoveContainer" containerID="b1a7dc64834d132a60be9bff1d80a8efbd3675bb1d4c651c9669baa73fbe7109" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.520345 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hwj5f"] Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.525050 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9b6b21e-615b-458b-ae60-5e8535dea0c1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c9b6b21e-615b-458b-ae60-5e8535dea0c1" (UID: "c9b6b21e-615b-458b-ae60-5e8535dea0c1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.532260 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-hwj5f"] Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.536978 4592 scope.go:117] "RemoveContainer" containerID="54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 16:56:15.537629 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3\": container with ID starting with 54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3 not found: ID does not exist" containerID="54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.537670 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3"} err="failed to get container status \"54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3\": rpc error: code = NotFound desc = could not find container \"54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3\": container with ID starting with 54b80ef5f07d7244d508a950c5a6241435547e946b645cdcbfd1af9c1fba0ac3 not found: ID does not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.537698 4592 scope.go:117] "RemoveContainer" containerID="5ecbe7d8c0e8df5ef2bc2ec7470513c2b43c531d88750842e86bf3fc0f040f92" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 16:56:15.538038 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ecbe7d8c0e8df5ef2bc2ec7470513c2b43c531d88750842e86bf3fc0f040f92\": container with ID starting with 5ecbe7d8c0e8df5ef2bc2ec7470513c2b43c531d88750842e86bf3fc0f040f92 not found: ID does not exist" containerID="5ecbe7d8c0e8df5ef2bc2ec7470513c2b43c531d88750842e86bf3fc0f040f92" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.538074 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ecbe7d8c0e8df5ef2bc2ec7470513c2b43c531d88750842e86bf3fc0f040f92"} err="failed to get container status \"5ecbe7d8c0e8df5ef2bc2ec7470513c2b43c531d88750842e86bf3fc0f040f92\": rpc error: code = NotFound desc = could not find container \"5ecbe7d8c0e8df5ef2bc2ec7470513c2b43c531d88750842e86bf3fc0f040f92\": container with ID starting with 5ecbe7d8c0e8df5ef2bc2ec7470513c2b43c531d88750842e86bf3fc0f040f92 not found: ID does not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.538099 4592 scope.go:117] "RemoveContainer" containerID="b1a7dc64834d132a60be9bff1d80a8efbd3675bb1d4c651c9669baa73fbe7109" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 16:56:15.538410 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1a7dc64834d132a60be9bff1d80a8efbd3675bb1d4c651c9669baa73fbe7109\": container with ID starting with b1a7dc64834d132a60be9bff1d80a8efbd3675bb1d4c651c9669baa73fbe7109 not found: ID does not exist" containerID="b1a7dc64834d132a60be9bff1d80a8efbd3675bb1d4c651c9669baa73fbe7109" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.538444 4592 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b1a7dc64834d132a60be9bff1d80a8efbd3675bb1d4c651c9669baa73fbe7109"} err="failed to get container status \"b1a7dc64834d132a60be9bff1d80a8efbd3675bb1d4c651c9669baa73fbe7109\": rpc error: code = NotFound desc = could not find container \"b1a7dc64834d132a60be9bff1d80a8efbd3675bb1d4c651c9669baa73fbe7109\": container with ID starting with b1a7dc64834d132a60be9bff1d80a8efbd3675bb1d4c651c9669baa73fbe7109 not found: ID does not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.538466 4592 scope.go:117] "RemoveContainer" containerID="3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.543490 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hdhfp"] Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.550889 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hdhfp"] Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.562753 4592 scope.go:117] "RemoveContainer" containerID="2de724d79d9f8db586b297011d1358b2abb22eca1d9a364ebde7df423ed15fbc" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.581428 4592 scope.go:117] "RemoveContainer" containerID="125b3af7958801c929a41944e9cffde51c1fcba7b07fbb8527389b4f41e7cb41" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.588998 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-utilities\") pod \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\" (UID: \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.589047 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-catalog-content\") pod \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\" (UID: \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.589077 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-562jq\" (UniqueName: \"kubernetes.io/projected/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-kube-api-access-562jq\") pod \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\" (UID: \"9611b65a-9551-46c6-a4ae-70c8d6eb0b54\") " Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.589376 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c9b6b21e-615b-458b-ae60-5e8535dea0c1-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.589387 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97mr6\" (UniqueName: \"kubernetes.io/projected/c9b6b21e-615b-458b-ae60-5e8535dea0c1-kube-api-access-97mr6\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.589398 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/836078bb-4975-4487-9404-6a3e4348292b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.589406 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c9b6b21e-615b-458b-ae60-5e8535dea0c1-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 
16:56:15.590521 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-utilities" (OuterVolumeSpecName: "utilities") pod "9611b65a-9551-46c6-a4ae-70c8d6eb0b54" (UID: "9611b65a-9551-46c6-a4ae-70c8d6eb0b54"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.592468 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-kube-api-access-562jq" (OuterVolumeSpecName: "kube-api-access-562jq") pod "9611b65a-9551-46c6-a4ae-70c8d6eb0b54" (UID: "9611b65a-9551-46c6-a4ae-70c8d6eb0b54"). InnerVolumeSpecName "kube-api-access-562jq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.598786 4592 scope.go:117] "RemoveContainer" containerID="3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 16:56:15.600442 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90\": container with ID starting with 3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90 not found: ID does not exist" containerID="3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.600582 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90"} err="failed to get container status \"3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90\": rpc error: code = NotFound desc = could not find container \"3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90\": container with ID starting with 3bfe37c4f32bae347aee8219cedcadd2b97babf80344b157f77ada9b7cb16a90 not found: ID does not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.600608 4592 scope.go:117] "RemoveContainer" containerID="2de724d79d9f8db586b297011d1358b2abb22eca1d9a364ebde7df423ed15fbc" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 16:56:15.600871 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2de724d79d9f8db586b297011d1358b2abb22eca1d9a364ebde7df423ed15fbc\": container with ID starting with 2de724d79d9f8db586b297011d1358b2abb22eca1d9a364ebde7df423ed15fbc not found: ID does not exist" containerID="2de724d79d9f8db586b297011d1358b2abb22eca1d9a364ebde7df423ed15fbc" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.600893 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2de724d79d9f8db586b297011d1358b2abb22eca1d9a364ebde7df423ed15fbc"} err="failed to get container status \"2de724d79d9f8db586b297011d1358b2abb22eca1d9a364ebde7df423ed15fbc\": rpc error: code = NotFound desc = could not find container \"2de724d79d9f8db586b297011d1358b2abb22eca1d9a364ebde7df423ed15fbc\": container with ID starting with 2de724d79d9f8db586b297011d1358b2abb22eca1d9a364ebde7df423ed15fbc not found: ID does not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.600907 4592 scope.go:117] "RemoveContainer" containerID="125b3af7958801c929a41944e9cffde51c1fcba7b07fbb8527389b4f41e7cb41" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 
16:56:15.601544 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"125b3af7958801c929a41944e9cffde51c1fcba7b07fbb8527389b4f41e7cb41\": container with ID starting with 125b3af7958801c929a41944e9cffde51c1fcba7b07fbb8527389b4f41e7cb41 not found: ID does not exist" containerID="125b3af7958801c929a41944e9cffde51c1fcba7b07fbb8527389b4f41e7cb41" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.601563 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"125b3af7958801c929a41944e9cffde51c1fcba7b07fbb8527389b4f41e7cb41"} err="failed to get container status \"125b3af7958801c929a41944e9cffde51c1fcba7b07fbb8527389b4f41e7cb41\": rpc error: code = NotFound desc = could not find container \"125b3af7958801c929a41944e9cffde51c1fcba7b07fbb8527389b4f41e7cb41\": container with ID starting with 125b3af7958801c929a41944e9cffde51c1fcba7b07fbb8527389b4f41e7cb41 not found: ID does not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.601579 4592 scope.go:117] "RemoveContainer" containerID="41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.629539 4592 scope.go:117] "RemoveContainer" containerID="69df71014ceb4780de5e9cb6033fc3abc1df4badbe233d0b2bf9aea83af88d24" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.658978 4592 scope.go:117] "RemoveContainer" containerID="3becf378fe24c6b89a4dbfba263c5bf1490d5303a45c7e4c6c9d7fdd05f0e29c" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.680403 4592 scope.go:117] "RemoveContainer" containerID="41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 16:56:15.680954 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926\": container with ID starting with 41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926 not found: ID does not exist" containerID="41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.680992 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926"} err="failed to get container status \"41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926\": rpc error: code = NotFound desc = could not find container \"41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926\": container with ID starting with 41e04572c2cc555e768e8ef205a9d1a36624f3c8913c695e8032c46ae361c926 not found: ID does not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.681018 4592 scope.go:117] "RemoveContainer" containerID="69df71014ceb4780de5e9cb6033fc3abc1df4badbe233d0b2bf9aea83af88d24" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 16:56:15.681307 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69df71014ceb4780de5e9cb6033fc3abc1df4badbe233d0b2bf9aea83af88d24\": container with ID starting with 69df71014ceb4780de5e9cb6033fc3abc1df4badbe233d0b2bf9aea83af88d24 not found: ID does not exist" containerID="69df71014ceb4780de5e9cb6033fc3abc1df4badbe233d0b2bf9aea83af88d24" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.681339 4592 pod_container_deletor.go:53] "DeleteContainer returned 
error" containerID={"Type":"cri-o","ID":"69df71014ceb4780de5e9cb6033fc3abc1df4badbe233d0b2bf9aea83af88d24"} err="failed to get container status \"69df71014ceb4780de5e9cb6033fc3abc1df4badbe233d0b2bf9aea83af88d24\": rpc error: code = NotFound desc = could not find container \"69df71014ceb4780de5e9cb6033fc3abc1df4badbe233d0b2bf9aea83af88d24\": container with ID starting with 69df71014ceb4780de5e9cb6033fc3abc1df4badbe233d0b2bf9aea83af88d24 not found: ID does not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.681358 4592 scope.go:117] "RemoveContainer" containerID="3becf378fe24c6b89a4dbfba263c5bf1490d5303a45c7e4c6c9d7fdd05f0e29c" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 16:56:15.681609 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3becf378fe24c6b89a4dbfba263c5bf1490d5303a45c7e4c6c9d7fdd05f0e29c\": container with ID starting with 3becf378fe24c6b89a4dbfba263c5bf1490d5303a45c7e4c6c9d7fdd05f0e29c not found: ID does not exist" containerID="3becf378fe24c6b89a4dbfba263c5bf1490d5303a45c7e4c6c9d7fdd05f0e29c" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.681635 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3becf378fe24c6b89a4dbfba263c5bf1490d5303a45c7e4c6c9d7fdd05f0e29c"} err="failed to get container status \"3becf378fe24c6b89a4dbfba263c5bf1490d5303a45c7e4c6c9d7fdd05f0e29c\": rpc error: code = NotFound desc = could not find container \"3becf378fe24c6b89a4dbfba263c5bf1490d5303a45c7e4c6c9d7fdd05f0e29c\": container with ID starting with 3becf378fe24c6b89a4dbfba263c5bf1490d5303a45c7e4c6c9d7fdd05f0e29c not found: ID does not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.681651 4592 scope.go:117] "RemoveContainer" containerID="7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.690990 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.691023 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-562jq\" (UniqueName: \"kubernetes.io/projected/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-kube-api-access-562jq\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.698790 4592 scope.go:117] "RemoveContainer" containerID="7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 16:56:15.699267 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5\": container with ID starting with 7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5 not found: ID does not exist" containerID="7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.699302 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5"} err="failed to get container status \"7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5\": rpc error: code = NotFound desc = could not find container \"7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5\": container with ID 
starting with 7026bdd8a2a051ed88085ce639d644edf960410bfd3508aa320156f0838654f5 not found: ID does not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.699330 4592 scope.go:117] "RemoveContainer" containerID="fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.702736 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9611b65a-9551-46c6-a4ae-70c8d6eb0b54" (UID: "9611b65a-9551-46c6-a4ae-70c8d6eb0b54"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.712743 4592 scope.go:117] "RemoveContainer" containerID="8a5981bcdc455d12508ff9dabfa8625642bd149cf84be7303f0af783b50492a5" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.728240 4592 scope.go:117] "RemoveContainer" containerID="87e711d9fe3ba4780baa95c8a432972643886cce4a0ba33654a087dabdcadbf4" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.740618 4592 scope.go:117] "RemoveContainer" containerID="fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 16:56:15.742313 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a\": container with ID starting with fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a not found: ID does not exist" containerID="fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.742344 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a"} err="failed to get container status \"fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a\": rpc error: code = NotFound desc = could not find container \"fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a\": container with ID starting with fdb2e04b41fdc9675ed6a16f486dacdf1b3295c71c9dccfdc111f8ba10a98d7a not found: ID does not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.742365 4592 scope.go:117] "RemoveContainer" containerID="8a5981bcdc455d12508ff9dabfa8625642bd149cf84be7303f0af783b50492a5" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 16:56:15.742584 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a5981bcdc455d12508ff9dabfa8625642bd149cf84be7303f0af783b50492a5\": container with ID starting with 8a5981bcdc455d12508ff9dabfa8625642bd149cf84be7303f0af783b50492a5 not found: ID does not exist" containerID="8a5981bcdc455d12508ff9dabfa8625642bd149cf84be7303f0af783b50492a5" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.742609 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a5981bcdc455d12508ff9dabfa8625642bd149cf84be7303f0af783b50492a5"} err="failed to get container status \"8a5981bcdc455d12508ff9dabfa8625642bd149cf84be7303f0af783b50492a5\": rpc error: code = NotFound desc = could not find container \"8a5981bcdc455d12508ff9dabfa8625642bd149cf84be7303f0af783b50492a5\": container with ID starting with 8a5981bcdc455d12508ff9dabfa8625642bd149cf84be7303f0af783b50492a5 not found: ID does 
not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.742628 4592 scope.go:117] "RemoveContainer" containerID="87e711d9fe3ba4780baa95c8a432972643886cce4a0ba33654a087dabdcadbf4" Sep 29 16:56:15 crc kubenswrapper[4592]: E0929 16:56:15.742924 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87e711d9fe3ba4780baa95c8a432972643886cce4a0ba33654a087dabdcadbf4\": container with ID starting with 87e711d9fe3ba4780baa95c8a432972643886cce4a0ba33654a087dabdcadbf4 not found: ID does not exist" containerID="87e711d9fe3ba4780baa95c8a432972643886cce4a0ba33654a087dabdcadbf4" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.742977 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87e711d9fe3ba4780baa95c8a432972643886cce4a0ba33654a087dabdcadbf4"} err="failed to get container status \"87e711d9fe3ba4780baa95c8a432972643886cce4a0ba33654a087dabdcadbf4\": rpc error: code = NotFound desc = could not find container \"87e711d9fe3ba4780baa95c8a432972643886cce4a0ba33654a087dabdcadbf4\": container with ID starting with 87e711d9fe3ba4780baa95c8a432972643886cce4a0ba33654a087dabdcadbf4 not found: ID does not exist" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.755201 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r9mwk"] Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.759394 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-r9mwk"] Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.792569 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9611b65a-9551-46c6-a4ae-70c8d6eb0b54-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.835309 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k498t"] Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.840541 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-k498t"] Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.851759 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4c6m9"] Sep 29 16:56:15 crc kubenswrapper[4592]: I0929 16:56:15.854600 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4c6m9"] Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.200655 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5l86w"] Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.511410 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" event={"ID":"d9110599-7f42-4970-93fa-89f37c84fad3","Type":"ContainerStarted","Data":"7999c3e0aa74fda17376de9d5cd8782f48a1172440912f7d86ebff9721df4797"} Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.511454 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" event={"ID":"d9110599-7f42-4970-93fa-89f37c84fad3","Type":"ContainerStarted","Data":"7657a532fc27fa1d0761f9b630b5ea3ca243d9de48626d5f61d63aa69b257d7c"} Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.512600 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.517859 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.534715 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-hq85k" podStartSLOduration=2.53469274 podStartE2EDuration="2.53469274s" podCreationTimestamp="2025-09-29 16:56:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:56:16.531812533 +0000 UTC m=+306.679590234" watchObservedRunningTime="2025-09-29 16:56:16.53469274 +0000 UTC m=+306.682470421" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876166 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d2j4d"] Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876393 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="542d754d-bd15-40b7-8208-876f318413a9" containerName="marketplace-operator" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876411 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="542d754d-bd15-40b7-8208-876f318413a9" containerName="marketplace-operator" Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876426 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="836078bb-4975-4487-9404-6a3e4348292b" containerName="registry-server" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876435 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="836078bb-4975-4487-9404-6a3e4348292b" containerName="registry-server" Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876444 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" containerName="registry-server" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876452 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" containerName="registry-server" Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876460 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" containerName="extract-utilities" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876466 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" containerName="extract-utilities" Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876474 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" containerName="registry-server" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876480 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" containerName="registry-server" Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876488 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" containerName="extract-content" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876494 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" containerName="extract-content" Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876502 4592 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="836078bb-4975-4487-9404-6a3e4348292b" containerName="extract-utilities" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876508 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="836078bb-4975-4487-9404-6a3e4348292b" containerName="extract-utilities" Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876526 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0ffea6d-0977-4552-961e-fc318ff7db95" containerName="registry-server" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876532 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0ffea6d-0977-4552-961e-fc318ff7db95" containerName="registry-server" Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876539 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0ffea6d-0977-4552-961e-fc318ff7db95" containerName="extract-content" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876545 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0ffea6d-0977-4552-961e-fc318ff7db95" containerName="extract-content" Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876552 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" containerName="extract-content" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876558 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" containerName="extract-content" Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876566 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0ffea6d-0977-4552-961e-fc318ff7db95" containerName="extract-utilities" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876572 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0ffea6d-0977-4552-961e-fc318ff7db95" containerName="extract-utilities" Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876578 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="836078bb-4975-4487-9404-6a3e4348292b" containerName="extract-content" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876585 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="836078bb-4975-4487-9404-6a3e4348292b" containerName="extract-content" Sep 29 16:56:16 crc kubenswrapper[4592]: E0929 16:56:16.876591 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" containerName="extract-utilities" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876597 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" containerName="extract-utilities" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876679 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="836078bb-4975-4487-9404-6a3e4348292b" containerName="registry-server" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876689 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="542d754d-bd15-40b7-8208-876f318413a9" containerName="marketplace-operator" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876695 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0ffea6d-0977-4552-961e-fc318ff7db95" containerName="registry-server" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876704 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" containerName="registry-server" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.876713 4592 
memory_manager.go:354] "RemoveStaleState removing state" podUID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" containerName="registry-server" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.877418 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.879605 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Sep 29 16:56:16 crc kubenswrapper[4592]: I0929 16:56:16.883718 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d2j4d"] Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.009588 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baf334fc-312d-4264-a6e0-a4c2569421d2-catalog-content\") pod \"redhat-marketplace-d2j4d\" (UID: \"baf334fc-312d-4264-a6e0-a4c2569421d2\") " pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.009949 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbzz7\" (UniqueName: \"kubernetes.io/projected/baf334fc-312d-4264-a6e0-a4c2569421d2-kube-api-access-kbzz7\") pod \"redhat-marketplace-d2j4d\" (UID: \"baf334fc-312d-4264-a6e0-a4c2569421d2\") " pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.009995 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baf334fc-312d-4264-a6e0-a4c2569421d2-utilities\") pod \"redhat-marketplace-d2j4d\" (UID: \"baf334fc-312d-4264-a6e0-a4c2569421d2\") " pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.077081 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-29m4q"] Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.078065 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.080013 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.091225 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-29m4q"] Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.110895 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baf334fc-312d-4264-a6e0-a4c2569421d2-catalog-content\") pod \"redhat-marketplace-d2j4d\" (UID: \"baf334fc-312d-4264-a6e0-a4c2569421d2\") " pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.110938 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbzz7\" (UniqueName: \"kubernetes.io/projected/baf334fc-312d-4264-a6e0-a4c2569421d2-kube-api-access-kbzz7\") pod \"redhat-marketplace-d2j4d\" (UID: \"baf334fc-312d-4264-a6e0-a4c2569421d2\") " pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.110978 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baf334fc-312d-4264-a6e0-a4c2569421d2-utilities\") pod \"redhat-marketplace-d2j4d\" (UID: \"baf334fc-312d-4264-a6e0-a4c2569421d2\") " pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.111433 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/baf334fc-312d-4264-a6e0-a4c2569421d2-utilities\") pod \"redhat-marketplace-d2j4d\" (UID: \"baf334fc-312d-4264-a6e0-a4c2569421d2\") " pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.111673 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/baf334fc-312d-4264-a6e0-a4c2569421d2-catalog-content\") pod \"redhat-marketplace-d2j4d\" (UID: \"baf334fc-312d-4264-a6e0-a4c2569421d2\") " pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.133410 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbzz7\" (UniqueName: \"kubernetes.io/projected/baf334fc-312d-4264-a6e0-a4c2569421d2-kube-api-access-kbzz7\") pod \"redhat-marketplace-d2j4d\" (UID: \"baf334fc-312d-4264-a6e0-a4c2569421d2\") " pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.190013 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="542d754d-bd15-40b7-8208-876f318413a9" path="/var/lib/kubelet/pods/542d754d-bd15-40b7-8208-876f318413a9/volumes" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.190669 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="836078bb-4975-4487-9404-6a3e4348292b" path="/var/lib/kubelet/pods/836078bb-4975-4487-9404-6a3e4348292b/volumes" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.191542 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9611b65a-9551-46c6-a4ae-70c8d6eb0b54" path="/var/lib/kubelet/pods/9611b65a-9551-46c6-a4ae-70c8d6eb0b54/volumes" Sep 29 16:56:17 crc 
kubenswrapper[4592]: I0929 16:56:17.192840 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0ffea6d-0977-4552-961e-fc318ff7db95" path="/var/lib/kubelet/pods/c0ffea6d-0977-4552-961e-fc318ff7db95/volumes" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.193433 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9b6b21e-615b-458b-ae60-5e8535dea0c1" path="/var/lib/kubelet/pods/c9b6b21e-615b-458b-ae60-5e8535dea0c1/volumes" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.201228 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.212542 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/204d33ca-6209-484f-b882-14d0c4270129-utilities\") pod \"redhat-operators-29m4q\" (UID: \"204d33ca-6209-484f-b882-14d0c4270129\") " pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.212637 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhl9z\" (UniqueName: \"kubernetes.io/projected/204d33ca-6209-484f-b882-14d0c4270129-kube-api-access-qhl9z\") pod \"redhat-operators-29m4q\" (UID: \"204d33ca-6209-484f-b882-14d0c4270129\") " pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.212754 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/204d33ca-6209-484f-b882-14d0c4270129-catalog-content\") pod \"redhat-operators-29m4q\" (UID: \"204d33ca-6209-484f-b882-14d0c4270129\") " pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.314434 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/204d33ca-6209-484f-b882-14d0c4270129-catalog-content\") pod \"redhat-operators-29m4q\" (UID: \"204d33ca-6209-484f-b882-14d0c4270129\") " pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.314575 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/204d33ca-6209-484f-b882-14d0c4270129-utilities\") pod \"redhat-operators-29m4q\" (UID: \"204d33ca-6209-484f-b882-14d0c4270129\") " pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.314610 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhl9z\" (UniqueName: \"kubernetes.io/projected/204d33ca-6209-484f-b882-14d0c4270129-kube-api-access-qhl9z\") pod \"redhat-operators-29m4q\" (UID: \"204d33ca-6209-484f-b882-14d0c4270129\") " pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.316732 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/204d33ca-6209-484f-b882-14d0c4270129-utilities\") pod \"redhat-operators-29m4q\" (UID: \"204d33ca-6209-484f-b882-14d0c4270129\") " pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.317172 4592 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/204d33ca-6209-484f-b882-14d0c4270129-catalog-content\") pod \"redhat-operators-29m4q\" (UID: \"204d33ca-6209-484f-b882-14d0c4270129\") " pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.358117 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhl9z\" (UniqueName: \"kubernetes.io/projected/204d33ca-6209-484f-b882-14d0c4270129-kube-api-access-qhl9z\") pod \"redhat-operators-29m4q\" (UID: \"204d33ca-6209-484f-b882-14d0c4270129\") " pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.402489 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.567942 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-29m4q"] Sep 29 16:56:17 crc kubenswrapper[4592]: W0929 16:56:17.571892 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod204d33ca_6209_484f_b882_14d0c4270129.slice/crio-43125a33d35e2fb63265887fbc9fe8080f1cafba52670caf808e8240104c335c WatchSource:0}: Error finding container 43125a33d35e2fb63265887fbc9fe8080f1cafba52670caf808e8240104c335c: Status 404 returned error can't find the container with id 43125a33d35e2fb63265887fbc9fe8080f1cafba52670caf808e8240104c335c Sep 29 16:56:17 crc kubenswrapper[4592]: I0929 16:56:17.606788 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d2j4d"] Sep 29 16:56:17 crc kubenswrapper[4592]: W0929 16:56:17.625804 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbaf334fc_312d_4264_a6e0_a4c2569421d2.slice/crio-324f75e2f3a00954beeebe9e2092d7fd8ae490533d95af285d5bbc62b414ad97 WatchSource:0}: Error finding container 324f75e2f3a00954beeebe9e2092d7fd8ae490533d95af285d5bbc62b414ad97: Status 404 returned error can't find the container with id 324f75e2f3a00954beeebe9e2092d7fd8ae490533d95af285d5bbc62b414ad97 Sep 29 16:56:18 crc kubenswrapper[4592]: I0929 16:56:18.531407 4592 generic.go:334] "Generic (PLEG): container finished" podID="204d33ca-6209-484f-b882-14d0c4270129" containerID="08a2858437a6882d308e5df67434d5f2b5effbd7b4ffd441999f6a38eb161a27" exitCode=0 Sep 29 16:56:18 crc kubenswrapper[4592]: I0929 16:56:18.531499 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29m4q" event={"ID":"204d33ca-6209-484f-b882-14d0c4270129","Type":"ContainerDied","Data":"08a2858437a6882d308e5df67434d5f2b5effbd7b4ffd441999f6a38eb161a27"} Sep 29 16:56:18 crc kubenswrapper[4592]: I0929 16:56:18.531530 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29m4q" event={"ID":"204d33ca-6209-484f-b882-14d0c4270129","Type":"ContainerStarted","Data":"43125a33d35e2fb63265887fbc9fe8080f1cafba52670caf808e8240104c335c"} Sep 29 16:56:18 crc kubenswrapper[4592]: I0929 16:56:18.533902 4592 generic.go:334] "Generic (PLEG): container finished" podID="baf334fc-312d-4264-a6e0-a4c2569421d2" containerID="4f3ca8e8efc589fb76e3c1271f20761443f02d645aa254ef700608e2306dcfbb" exitCode=0 Sep 29 16:56:18 crc kubenswrapper[4592]: I0929 16:56:18.534009 4592 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-marketplace/redhat-marketplace-d2j4d" event={"ID":"baf334fc-312d-4264-a6e0-a4c2569421d2","Type":"ContainerDied","Data":"4f3ca8e8efc589fb76e3c1271f20761443f02d645aa254ef700608e2306dcfbb"} Sep 29 16:56:18 crc kubenswrapper[4592]: I0929 16:56:18.534078 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2j4d" event={"ID":"baf334fc-312d-4264-a6e0-a4c2569421d2","Type":"ContainerStarted","Data":"324f75e2f3a00954beeebe9e2092d7fd8ae490533d95af285d5bbc62b414ad97"} Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.281480 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b5x2p"] Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.282689 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.285873 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.290269 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b5x2p"] Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.439228 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1f1db21-cfd8-4071-8923-9a7b08eeb035-utilities\") pod \"community-operators-b5x2p\" (UID: \"f1f1db21-cfd8-4071-8923-9a7b08eeb035\") " pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.439570 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1f1db21-cfd8-4071-8923-9a7b08eeb035-catalog-content\") pod \"community-operators-b5x2p\" (UID: \"f1f1db21-cfd8-4071-8923-9a7b08eeb035\") " pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.439666 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzvzm\" (UniqueName: \"kubernetes.io/projected/f1f1db21-cfd8-4071-8923-9a7b08eeb035-kube-api-access-hzvzm\") pod \"community-operators-b5x2p\" (UID: \"f1f1db21-cfd8-4071-8923-9a7b08eeb035\") " pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.481330 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ztq52"] Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.482794 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.486388 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.502090 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ztq52"] Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.539815 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2j4d" event={"ID":"baf334fc-312d-4264-a6e0-a4c2569421d2","Type":"ContainerStarted","Data":"dd8efb7ee80ba2e6a91ac0272393cd79fc385fcb0ef4c7172f39c9dd1c9f78a2"} Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.540674 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1f1db21-cfd8-4071-8923-9a7b08eeb035-utilities\") pod \"community-operators-b5x2p\" (UID: \"f1f1db21-cfd8-4071-8923-9a7b08eeb035\") " pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.540810 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1f1db21-cfd8-4071-8923-9a7b08eeb035-catalog-content\") pod \"community-operators-b5x2p\" (UID: \"f1f1db21-cfd8-4071-8923-9a7b08eeb035\") " pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.540862 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b766f454-e3ec-4c1c-b730-a4b9a4c47068-utilities\") pod \"certified-operators-ztq52\" (UID: \"b766f454-e3ec-4c1c-b730-a4b9a4c47068\") " pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.540885 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzvzm\" (UniqueName: \"kubernetes.io/projected/f1f1db21-cfd8-4071-8923-9a7b08eeb035-kube-api-access-hzvzm\") pod \"community-operators-b5x2p\" (UID: \"f1f1db21-cfd8-4071-8923-9a7b08eeb035\") " pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.541257 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1f1db21-cfd8-4071-8923-9a7b08eeb035-utilities\") pod \"community-operators-b5x2p\" (UID: \"f1f1db21-cfd8-4071-8923-9a7b08eeb035\") " pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.541550 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1f1db21-cfd8-4071-8923-9a7b08eeb035-catalog-content\") pod \"community-operators-b5x2p\" (UID: \"f1f1db21-cfd8-4071-8923-9a7b08eeb035\") " pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.577667 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzvzm\" (UniqueName: \"kubernetes.io/projected/f1f1db21-cfd8-4071-8923-9a7b08eeb035-kube-api-access-hzvzm\") pod \"community-operators-b5x2p\" (UID: \"f1f1db21-cfd8-4071-8923-9a7b08eeb035\") " pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:19 crc 
kubenswrapper[4592]: I0929 16:56:19.606189 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.643823 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4fv4\" (UniqueName: \"kubernetes.io/projected/b766f454-e3ec-4c1c-b730-a4b9a4c47068-kube-api-access-d4fv4\") pod \"certified-operators-ztq52\" (UID: \"b766f454-e3ec-4c1c-b730-a4b9a4c47068\") " pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.644315 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b766f454-e3ec-4c1c-b730-a4b9a4c47068-utilities\") pod \"certified-operators-ztq52\" (UID: \"b766f454-e3ec-4c1c-b730-a4b9a4c47068\") " pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.644374 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b766f454-e3ec-4c1c-b730-a4b9a4c47068-catalog-content\") pod \"certified-operators-ztq52\" (UID: \"b766f454-e3ec-4c1c-b730-a4b9a4c47068\") " pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.645395 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b766f454-e3ec-4c1c-b730-a4b9a4c47068-utilities\") pod \"certified-operators-ztq52\" (UID: \"b766f454-e3ec-4c1c-b730-a4b9a4c47068\") " pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.744978 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4fv4\" (UniqueName: \"kubernetes.io/projected/b766f454-e3ec-4c1c-b730-a4b9a4c47068-kube-api-access-d4fv4\") pod \"certified-operators-ztq52\" (UID: \"b766f454-e3ec-4c1c-b730-a4b9a4c47068\") " pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.745271 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b766f454-e3ec-4c1c-b730-a4b9a4c47068-catalog-content\") pod \"certified-operators-ztq52\" (UID: \"b766f454-e3ec-4c1c-b730-a4b9a4c47068\") " pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.745701 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b766f454-e3ec-4c1c-b730-a4b9a4c47068-catalog-content\") pod \"certified-operators-ztq52\" (UID: \"b766f454-e3ec-4c1c-b730-a4b9a4c47068\") " pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.766926 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4fv4\" (UniqueName: \"kubernetes.io/projected/b766f454-e3ec-4c1c-b730-a4b9a4c47068-kube-api-access-d4fv4\") pod \"certified-operators-ztq52\" (UID: \"b766f454-e3ec-4c1c-b730-a4b9a4c47068\") " pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.800793 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:19 crc kubenswrapper[4592]: I0929 16:56:19.847575 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b5x2p"] Sep 29 16:56:19 crc kubenswrapper[4592]: W0929 16:56:19.859589 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1f1db21_cfd8_4071_8923_9a7b08eeb035.slice/crio-d9031f2115d5a0491d7346be7a8c83fae4f49add1cb71d442192084328c57d78 WatchSource:0}: Error finding container d9031f2115d5a0491d7346be7a8c83fae4f49add1cb71d442192084328c57d78: Status 404 returned error can't find the container with id d9031f2115d5a0491d7346be7a8c83fae4f49add1cb71d442192084328c57d78 Sep 29 16:56:20 crc kubenswrapper[4592]: I0929 16:56:20.245330 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ztq52"] Sep 29 16:56:20 crc kubenswrapper[4592]: W0929 16:56:20.254804 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb766f454_e3ec_4c1c_b730_a4b9a4c47068.slice/crio-d75dc266e6b5410a5f075946041189d0b0414f99afb34cb4248e33cd9407bf19 WatchSource:0}: Error finding container d75dc266e6b5410a5f075946041189d0b0414f99afb34cb4248e33cd9407bf19: Status 404 returned error can't find the container with id d75dc266e6b5410a5f075946041189d0b0414f99afb34cb4248e33cd9407bf19 Sep 29 16:56:20 crc kubenswrapper[4592]: I0929 16:56:20.549424 4592 generic.go:334] "Generic (PLEG): container finished" podID="b766f454-e3ec-4c1c-b730-a4b9a4c47068" containerID="c361b45f6dfe6b42e056f199daebef770acd539855be35e9d6e0d7c5347f7013" exitCode=0 Sep 29 16:56:20 crc kubenswrapper[4592]: I0929 16:56:20.549497 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ztq52" event={"ID":"b766f454-e3ec-4c1c-b730-a4b9a4c47068","Type":"ContainerDied","Data":"c361b45f6dfe6b42e056f199daebef770acd539855be35e9d6e0d7c5347f7013"} Sep 29 16:56:20 crc kubenswrapper[4592]: I0929 16:56:20.549529 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ztq52" event={"ID":"b766f454-e3ec-4c1c-b730-a4b9a4c47068","Type":"ContainerStarted","Data":"d75dc266e6b5410a5f075946041189d0b0414f99afb34cb4248e33cd9407bf19"} Sep 29 16:56:20 crc kubenswrapper[4592]: I0929 16:56:20.552413 4592 generic.go:334] "Generic (PLEG): container finished" podID="204d33ca-6209-484f-b882-14d0c4270129" containerID="7b0bb83324ea10cfdff6fa95bc57fa5cb5569349a8433f93b796bc0a480213cd" exitCode=0 Sep 29 16:56:20 crc kubenswrapper[4592]: I0929 16:56:20.552459 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29m4q" event={"ID":"204d33ca-6209-484f-b882-14d0c4270129","Type":"ContainerDied","Data":"7b0bb83324ea10cfdff6fa95bc57fa5cb5569349a8433f93b796bc0a480213cd"} Sep 29 16:56:20 crc kubenswrapper[4592]: I0929 16:56:20.556018 4592 generic.go:334] "Generic (PLEG): container finished" podID="f1f1db21-cfd8-4071-8923-9a7b08eeb035" containerID="3be04e7a35ee8b1f7974221630a8d86ef386b9192e200461b86609b1dcff0a16" exitCode=0 Sep 29 16:56:20 crc kubenswrapper[4592]: I0929 16:56:20.556099 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5x2p" event={"ID":"f1f1db21-cfd8-4071-8923-9a7b08eeb035","Type":"ContainerDied","Data":"3be04e7a35ee8b1f7974221630a8d86ef386b9192e200461b86609b1dcff0a16"} 
Sep 29 16:56:20 crc kubenswrapper[4592]: I0929 16:56:20.556131 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5x2p" event={"ID":"f1f1db21-cfd8-4071-8923-9a7b08eeb035","Type":"ContainerStarted","Data":"d9031f2115d5a0491d7346be7a8c83fae4f49add1cb71d442192084328c57d78"} Sep 29 16:56:20 crc kubenswrapper[4592]: I0929 16:56:20.559281 4592 generic.go:334] "Generic (PLEG): container finished" podID="baf334fc-312d-4264-a6e0-a4c2569421d2" containerID="dd8efb7ee80ba2e6a91ac0272393cd79fc385fcb0ef4c7172f39c9dd1c9f78a2" exitCode=0 Sep 29 16:56:20 crc kubenswrapper[4592]: I0929 16:56:20.559319 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2j4d" event={"ID":"baf334fc-312d-4264-a6e0-a4c2569421d2","Type":"ContainerDied","Data":"dd8efb7ee80ba2e6a91ac0272393cd79fc385fcb0ef4c7172f39c9dd1c9f78a2"} Sep 29 16:56:21 crc kubenswrapper[4592]: I0929 16:56:21.565543 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ztq52" event={"ID":"b766f454-e3ec-4c1c-b730-a4b9a4c47068","Type":"ContainerStarted","Data":"230bb357ce0f27e62276086f8bb2464ce4425f16ec52cd53f6cb4bf15e594b80"} Sep 29 16:56:21 crc kubenswrapper[4592]: I0929 16:56:21.567641 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-29m4q" event={"ID":"204d33ca-6209-484f-b882-14d0c4270129","Type":"ContainerStarted","Data":"b34950ab47c5ab50e6315175ac8dccf065717cbe92786bb8eef17c92df8025b3"} Sep 29 16:56:21 crc kubenswrapper[4592]: I0929 16:56:21.568963 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5x2p" event={"ID":"f1f1db21-cfd8-4071-8923-9a7b08eeb035","Type":"ContainerStarted","Data":"5f7c655344fd5e9b2932c6cadb34a9e3815241a16b6d0f38a30821f93ead2016"} Sep 29 16:56:21 crc kubenswrapper[4592]: I0929 16:56:21.571198 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d2j4d" event={"ID":"baf334fc-312d-4264-a6e0-a4c2569421d2","Type":"ContainerStarted","Data":"9a1855d0ecd12ee84a27558fa13a59dbe87c491d36d258b4eedc378410f790d4"} Sep 29 16:56:21 crc kubenswrapper[4592]: I0929 16:56:21.624122 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d2j4d" podStartSLOduration=3.04834254 podStartE2EDuration="5.624107978s" podCreationTimestamp="2025-09-29 16:56:16 +0000 UTC" firstStartedPulling="2025-09-29 16:56:18.536188527 +0000 UTC m=+308.683966208" lastFinishedPulling="2025-09-29 16:56:21.111953965 +0000 UTC m=+311.259731646" observedRunningTime="2025-09-29 16:56:21.621625833 +0000 UTC m=+311.769403514" watchObservedRunningTime="2025-09-29 16:56:21.624107978 +0000 UTC m=+311.771885659" Sep 29 16:56:21 crc kubenswrapper[4592]: I0929 16:56:21.642613 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-29m4q" podStartSLOduration=2.040507791 podStartE2EDuration="4.642592996s" podCreationTimestamp="2025-09-29 16:56:17 +0000 UTC" firstStartedPulling="2025-09-29 16:56:18.537265059 +0000 UTC m=+308.685042740" lastFinishedPulling="2025-09-29 16:56:21.139350274 +0000 UTC m=+311.287127945" observedRunningTime="2025-09-29 16:56:21.639635327 +0000 UTC m=+311.787413028" watchObservedRunningTime="2025-09-29 16:56:21.642592996 +0000 UTC m=+311.790370677" Sep 29 16:56:22 crc kubenswrapper[4592]: I0929 16:56:22.578885 4592 generic.go:334] "Generic (PLEG): 
container finished" podID="f1f1db21-cfd8-4071-8923-9a7b08eeb035" containerID="5f7c655344fd5e9b2932c6cadb34a9e3815241a16b6d0f38a30821f93ead2016" exitCode=0 Sep 29 16:56:22 crc kubenswrapper[4592]: I0929 16:56:22.578940 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5x2p" event={"ID":"f1f1db21-cfd8-4071-8923-9a7b08eeb035","Type":"ContainerDied","Data":"5f7c655344fd5e9b2932c6cadb34a9e3815241a16b6d0f38a30821f93ead2016"} Sep 29 16:56:22 crc kubenswrapper[4592]: I0929 16:56:22.583714 4592 generic.go:334] "Generic (PLEG): container finished" podID="b766f454-e3ec-4c1c-b730-a4b9a4c47068" containerID="230bb357ce0f27e62276086f8bb2464ce4425f16ec52cd53f6cb4bf15e594b80" exitCode=0 Sep 29 16:56:22 crc kubenswrapper[4592]: I0929 16:56:22.584538 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ztq52" event={"ID":"b766f454-e3ec-4c1c-b730-a4b9a4c47068","Type":"ContainerDied","Data":"230bb357ce0f27e62276086f8bb2464ce4425f16ec52cd53f6cb4bf15e594b80"} Sep 29 16:56:24 crc kubenswrapper[4592]: I0929 16:56:24.595683 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ztq52" event={"ID":"b766f454-e3ec-4c1c-b730-a4b9a4c47068","Type":"ContainerStarted","Data":"090bb2c6ebb623dc87bfbbac533e1c077a852ec33417122cce95a4db589f756c"} Sep 29 16:56:24 crc kubenswrapper[4592]: I0929 16:56:24.598637 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b5x2p" event={"ID":"f1f1db21-cfd8-4071-8923-9a7b08eeb035","Type":"ContainerStarted","Data":"d3442286338a3cc86b4525b91b1b74a92b5e3e8b321cc298a66cf3fc2716ab05"} Sep 29 16:56:24 crc kubenswrapper[4592]: I0929 16:56:24.643818 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b5x2p" podStartSLOduration=3.050158776 podStartE2EDuration="5.643800845s" podCreationTimestamp="2025-09-29 16:56:19 +0000 UTC" firstStartedPulling="2025-09-29 16:56:20.557276906 +0000 UTC m=+310.705054587" lastFinishedPulling="2025-09-29 16:56:23.150918975 +0000 UTC m=+313.298696656" observedRunningTime="2025-09-29 16:56:24.642116544 +0000 UTC m=+314.789894225" watchObservedRunningTime="2025-09-29 16:56:24.643800845 +0000 UTC m=+314.791578546" Sep 29 16:56:24 crc kubenswrapper[4592]: I0929 16:56:24.645618 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ztq52" podStartSLOduration=3.216518238 podStartE2EDuration="5.645609159s" podCreationTimestamp="2025-09-29 16:56:19 +0000 UTC" firstStartedPulling="2025-09-29 16:56:20.550706297 +0000 UTC m=+310.698483978" lastFinishedPulling="2025-09-29 16:56:22.979797208 +0000 UTC m=+313.127574899" observedRunningTime="2025-09-29 16:56:24.625183921 +0000 UTC m=+314.772961612" watchObservedRunningTime="2025-09-29 16:56:24.645609159 +0000 UTC m=+314.793386830" Sep 29 16:56:27 crc kubenswrapper[4592]: I0929 16:56:27.201733 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:27 crc kubenswrapper[4592]: I0929 16:56:27.203130 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:27 crc kubenswrapper[4592]: I0929 16:56:27.245944 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:27 crc 
kubenswrapper[4592]: I0929 16:56:27.402920 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:27 crc kubenswrapper[4592]: I0929 16:56:27.402972 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:27 crc kubenswrapper[4592]: I0929 16:56:27.443397 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:27 crc kubenswrapper[4592]: I0929 16:56:27.655074 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d2j4d" Sep 29 16:56:27 crc kubenswrapper[4592]: I0929 16:56:27.655404 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-29m4q" Sep 29 16:56:29 crc kubenswrapper[4592]: I0929 16:56:29.607092 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:29 crc kubenswrapper[4592]: I0929 16:56:29.607413 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:29 crc kubenswrapper[4592]: I0929 16:56:29.664045 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:29 crc kubenswrapper[4592]: I0929 16:56:29.705686 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b5x2p" Sep 29 16:56:29 crc kubenswrapper[4592]: I0929 16:56:29.801601 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:29 crc kubenswrapper[4592]: I0929 16:56:29.801650 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:29 crc kubenswrapper[4592]: I0929 16:56:29.853526 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:30 crc kubenswrapper[4592]: I0929 16:56:30.666466 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ztq52" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.234810 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" podUID="e1aa3a51-f8e6-49a1-8013-74755f9c89b0" containerName="oauth-openshift" containerID="cri-o://e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b" gracePeriod=15 Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.600582 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628231 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-ocp-branding-template\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628269 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-login\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628313 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-service-ca\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628341 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-idp-0-file-data\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628367 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-audit-policies\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628394 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-session\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628418 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-provider-selection\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628451 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-audit-dir\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628474 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-trusted-ca-bundle\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc 
kubenswrapper[4592]: I0929 16:56:41.628502 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-router-certs\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628533 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-cliconfig\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628590 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5h259\" (UniqueName: \"kubernetes.io/projected/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-kube-api-access-5h259\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628615 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-serving-cert\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.628638 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-error\") pod \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\" (UID: \"e1aa3a51-f8e6-49a1-8013-74755f9c89b0\") " Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.632799 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.633096 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.641081 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6fffd54687-pdmzq"] Sep 29 16:56:41 crc kubenswrapper[4592]: E0929 16:56:41.641425 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1aa3a51-f8e6-49a1-8013-74755f9c89b0" containerName="oauth-openshift" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.641446 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1aa3a51-f8e6-49a1-8013-74755f9c89b0" containerName="oauth-openshift" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.641655 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1aa3a51-f8e6-49a1-8013-74755f9c89b0" containerName="oauth-openshift" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.642318 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.644676 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.645019 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.645337 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.661225 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6fffd54687-pdmzq"] Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.667471 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.668559 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). 
InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.673671 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.673974 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-kube-api-access-5h259" (OuterVolumeSpecName: "kube-api-access-5h259") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "kube-api-access-5h259". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.683417 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.683822 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.684439 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.684789 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.684952 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "e1aa3a51-f8e6-49a1-8013-74755f9c89b0" (UID: "e1aa3a51-f8e6-49a1-8013-74755f9c89b0"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.689560 4592 generic.go:334] "Generic (PLEG): container finished" podID="e1aa3a51-f8e6-49a1-8013-74755f9c89b0" containerID="e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b" exitCode=0 Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.689601 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" event={"ID":"e1aa3a51-f8e6-49a1-8013-74755f9c89b0","Type":"ContainerDied","Data":"e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b"} Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.689628 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" event={"ID":"e1aa3a51-f8e6-49a1-8013-74755f9c89b0","Type":"ContainerDied","Data":"e6524c4652a919395c4ba730c343b61b4628090a6ccd610edb50a57c14b051a1"} Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.689649 4592 scope.go:117] "RemoveContainer" containerID="e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.689890 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-5l86w" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.713178 4592 scope.go:117] "RemoveContainer" containerID="e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b" Sep 29 16:56:41 crc kubenswrapper[4592]: E0929 16:56:41.714133 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b\": container with ID starting with e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b not found: ID does not exist" containerID="e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.714191 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b"} err="failed to get container status \"e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b\": rpc error: code = NotFound desc = could not find container \"e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b\": container with ID starting with e52a455c4e47d2a8f704f604b83757fe601bcfb08e9a6b28b963b6c40ce25c6b not found: ID does not exist" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.721791 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5l86w"] Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.724845 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-5l86w"] Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729479 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5b5c784e-9137-4598-b3d8-8711b82111fe-audit-dir\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729519 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729548 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729569 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-session\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729598 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-router-certs\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729614 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729629 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5b5c784e-9137-4598-b3d8-8711b82111fe-audit-policies\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729648 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-user-template-login\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729665 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-user-template-error\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc 
kubenswrapper[4592]: I0929 16:56:41.729687 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729706 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-service-ca\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729722 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmw6m\" (UniqueName: \"kubernetes.io/projected/5b5c784e-9137-4598-b3d8-8711b82111fe-kube-api-access-zmw6m\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729739 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729753 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729793 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729804 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729814 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5h259\" (UniqueName: \"kubernetes.io/projected/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-kube-api-access-5h259\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729822 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729832 4592 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.729847 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.730453 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.730464 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.730475 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.730484 4592 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-audit-policies\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.730493 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.730501 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.730509 4592 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-audit-dir\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.730519 4592 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e1aa3a51-f8e6-49a1-8013-74755f9c89b0-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.831990 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-user-template-error\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832039 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832064 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-service-ca\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832080 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmw6m\" (UniqueName: \"kubernetes.io/projected/5b5c784e-9137-4598-b3d8-8711b82111fe-kube-api-access-zmw6m\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832099 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832118 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832136 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5b5c784e-9137-4598-b3d8-8711b82111fe-audit-dir\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832190 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832229 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832253 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-session\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832293 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-router-certs\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832312 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832331 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5b5c784e-9137-4598-b3d8-8711b82111fe-audit-policies\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.832348 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-user-template-login\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.833002 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-service-ca\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.833270 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5b5c784e-9137-4598-b3d8-8711b82111fe-audit-dir\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.833563 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.834055 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-trusted-ca-bundle\") pod 
\"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.835104 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5b5c784e-9137-4598-b3d8-8711b82111fe-audit-policies\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.835809 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.835809 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.836187 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.836240 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-user-template-error\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.836784 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.837216 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-session\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.837955 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-user-template-login\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: 
\"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.839290 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5b5c784e-9137-4598-b3d8-8711b82111fe-v4-0-config-system-router-certs\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:41 crc kubenswrapper[4592]: I0929 16:56:41.851672 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmw6m\" (UniqueName: \"kubernetes.io/projected/5b5c784e-9137-4598-b3d8-8711b82111fe-kube-api-access-zmw6m\") pod \"oauth-openshift-6fffd54687-pdmzq\" (UID: \"5b5c784e-9137-4598-b3d8-8711b82111fe\") " pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:42 crc kubenswrapper[4592]: I0929 16:56:42.002734 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:42 crc kubenswrapper[4592]: I0929 16:56:42.201029 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6fffd54687-pdmzq"] Sep 29 16:56:42 crc kubenswrapper[4592]: I0929 16:56:42.695728 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" event={"ID":"5b5c784e-9137-4598-b3d8-8711b82111fe","Type":"ContainerStarted","Data":"250c77fdc2610170ae2f4e13786e30b341fb3db02294dc1645455aafe1a70f25"} Sep 29 16:56:42 crc kubenswrapper[4592]: I0929 16:56:42.696074 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" event={"ID":"5b5c784e-9137-4598-b3d8-8711b82111fe","Type":"ContainerStarted","Data":"460eec52def6e3ceeb150ae7e8fd6306fbf46b82894c5ea85b758f0b30215908"} Sep 29 16:56:42 crc kubenswrapper[4592]: I0929 16:56:42.696095 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:56:42 crc kubenswrapper[4592]: I0929 16:56:42.696931 4592 patch_prober.go:28] interesting pod/oauth-openshift-6fffd54687-pdmzq container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.60:6443/healthz\": dial tcp 10.217.0.60:6443: connect: connection refused" start-of-body= Sep 29 16:56:42 crc kubenswrapper[4592]: I0929 16:56:42.696966 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" podUID="5b5c784e-9137-4598-b3d8-8711b82111fe" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.60:6443/healthz\": dial tcp 10.217.0.60:6443: connect: connection refused" Sep 29 16:56:42 crc kubenswrapper[4592]: I0929 16:56:42.713939 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" podStartSLOduration=26.713919106 podStartE2EDuration="26.713919106s" podCreationTimestamp="2025-09-29 16:56:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:56:42.71104373 +0000 UTC m=+332.858821411" watchObservedRunningTime="2025-09-29 16:56:42.713919106 +0000 UTC m=+332.861696787" Sep 29 
16:56:43 crc kubenswrapper[4592]: I0929 16:56:43.188916 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1aa3a51-f8e6-49a1-8013-74755f9c89b0" path="/var/lib/kubelet/pods/e1aa3a51-f8e6-49a1-8013-74755f9c89b0/volumes" Sep 29 16:56:43 crc kubenswrapper[4592]: I0929 16:56:43.705903 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6fffd54687-pdmzq" Sep 29 16:57:30 crc kubenswrapper[4592]: I0929 16:57:30.883749 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 16:57:30 crc kubenswrapper[4592]: I0929 16:57:30.884465 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 16:58:00 crc kubenswrapper[4592]: I0929 16:58:00.883635 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 16:58:00 crc kubenswrapper[4592]: I0929 16:58:00.884164 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 16:58:30 crc kubenswrapper[4592]: I0929 16:58:30.883529 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 16:58:30 crc kubenswrapper[4592]: I0929 16:58:30.884196 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 16:58:30 crc kubenswrapper[4592]: I0929 16:58:30.884259 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 16:58:30 crc kubenswrapper[4592]: I0929 16:58:30.884873 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"148831675bbb8aed327c76ca2e7313d94cd39b81a906464672542ffb204027b7"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 16:58:30 crc kubenswrapper[4592]: I0929 16:58:30.884935 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" 
podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://148831675bbb8aed327c76ca2e7313d94cd39b81a906464672542ffb204027b7" gracePeriod=600 Sep 29 16:58:31 crc kubenswrapper[4592]: I0929 16:58:31.269629 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="148831675bbb8aed327c76ca2e7313d94cd39b81a906464672542ffb204027b7" exitCode=0 Sep 29 16:58:31 crc kubenswrapper[4592]: I0929 16:58:31.269685 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"148831675bbb8aed327c76ca2e7313d94cd39b81a906464672542ffb204027b7"} Sep 29 16:58:31 crc kubenswrapper[4592]: I0929 16:58:31.269978 4592 scope.go:117] "RemoveContainer" containerID="faf027b4354b1f6ec029746dae4263340ebbcba318344c7a0499ba786aa67230" Sep 29 16:58:32 crc kubenswrapper[4592]: I0929 16:58:32.280009 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"7ed991bd0dfceb7b79c3a45affb7bd94e2ef89c514442956cd38fa47619e5780"} Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.097117 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-2b6hf"] Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.098385 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.117963 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-2b6hf"] Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.264818 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/da1dacd4-b567-46c4-a895-9bd1f2a21b45-installation-pull-secrets\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.264914 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/da1dacd4-b567-46c4-a895-9bd1f2a21b45-ca-trust-extracted\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.264975 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/da1dacd4-b567-46c4-a895-9bd1f2a21b45-trusted-ca\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.265004 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.265041 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddn9x\" (UniqueName: \"kubernetes.io/projected/da1dacd4-b567-46c4-a895-9bd1f2a21b45-kube-api-access-ddn9x\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.265060 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/da1dacd4-b567-46c4-a895-9bd1f2a21b45-registry-tls\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.265093 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/da1dacd4-b567-46c4-a895-9bd1f2a21b45-registry-certificates\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.265113 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/da1dacd4-b567-46c4-a895-9bd1f2a21b45-bound-sa-token\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.284001 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.366077 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/da1dacd4-b567-46c4-a895-9bd1f2a21b45-trusted-ca\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.366184 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddn9x\" (UniqueName: \"kubernetes.io/projected/da1dacd4-b567-46c4-a895-9bd1f2a21b45-kube-api-access-ddn9x\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.366208 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/da1dacd4-b567-46c4-a895-9bd1f2a21b45-registry-tls\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.366236 4592 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/da1dacd4-b567-46c4-a895-9bd1f2a21b45-registry-certificates\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.366262 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/da1dacd4-b567-46c4-a895-9bd1f2a21b45-bound-sa-token\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.367368 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/da1dacd4-b567-46c4-a895-9bd1f2a21b45-trusted-ca\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.367522 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/da1dacd4-b567-46c4-a895-9bd1f2a21b45-registry-certificates\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.367570 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/da1dacd4-b567-46c4-a895-9bd1f2a21b45-installation-pull-secrets\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.367627 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/da1dacd4-b567-46c4-a895-9bd1f2a21b45-ca-trust-extracted\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.367934 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/da1dacd4-b567-46c4-a895-9bd1f2a21b45-ca-trust-extracted\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.371286 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/da1dacd4-b567-46c4-a895-9bd1f2a21b45-registry-tls\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.371425 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/da1dacd4-b567-46c4-a895-9bd1f2a21b45-installation-pull-secrets\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: 
\"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.382261 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddn9x\" (UniqueName: \"kubernetes.io/projected/da1dacd4-b567-46c4-a895-9bd1f2a21b45-kube-api-access-ddn9x\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.387412 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/da1dacd4-b567-46c4-a895-9bd1f2a21b45-bound-sa-token\") pod \"image-registry-66df7c8f76-2b6hf\" (UID: \"da1dacd4-b567-46c4-a895-9bd1f2a21b45\") " pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.412063 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:14 crc kubenswrapper[4592]: I0929 16:59:14.666752 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-2b6hf"] Sep 29 16:59:14 crc kubenswrapper[4592]: W0929 16:59:14.673388 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda1dacd4_b567_46c4_a895_9bd1f2a21b45.slice/crio-816e3aca31675dd2d93dd3ec605643eab4d371d86b54f1076d167ce525fa20a2 WatchSource:0}: Error finding container 816e3aca31675dd2d93dd3ec605643eab4d371d86b54f1076d167ce525fa20a2: Status 404 returned error can't find the container with id 816e3aca31675dd2d93dd3ec605643eab4d371d86b54f1076d167ce525fa20a2 Sep 29 16:59:15 crc kubenswrapper[4592]: I0929 16:59:15.507958 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" event={"ID":"da1dacd4-b567-46c4-a895-9bd1f2a21b45","Type":"ContainerStarted","Data":"f4f4e5bffd5c3d758f19cb4656daaf1f5b1d68df592d118e9b16c9ae0ba66d7d"} Sep 29 16:59:15 crc kubenswrapper[4592]: I0929 16:59:15.508267 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:15 crc kubenswrapper[4592]: I0929 16:59:15.508280 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" event={"ID":"da1dacd4-b567-46c4-a895-9bd1f2a21b45","Type":"ContainerStarted","Data":"816e3aca31675dd2d93dd3ec605643eab4d371d86b54f1076d167ce525fa20a2"} Sep 29 16:59:15 crc kubenswrapper[4592]: I0929 16:59:15.530309 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" podStartSLOduration=1.530288944 podStartE2EDuration="1.530288944s" podCreationTimestamp="2025-09-29 16:59:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 16:59:15.52664737 +0000 UTC m=+485.674425071" watchObservedRunningTime="2025-09-29 16:59:15.530288944 +0000 UTC m=+485.678066625" Sep 29 16:59:34 crc kubenswrapper[4592]: I0929 16:59:34.418736 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-2b6hf" Sep 29 16:59:34 crc kubenswrapper[4592]: I0929 
16:59:34.495481 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w748k"] Sep 29 16:59:59 crc kubenswrapper[4592]: I0929 16:59:59.563847 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-w748k" podUID="d4b30d14-aea8-4482-8220-81cf36dc8a93" containerName="registry" containerID="cri-o://0a99c9f872186930dc2ff26f77162bd11595bcc5751d9fca6858048d734722da" gracePeriod=30 Sep 29 16:59:59 crc kubenswrapper[4592]: I0929 16:59:59.754529 4592 generic.go:334] "Generic (PLEG): container finished" podID="d4b30d14-aea8-4482-8220-81cf36dc8a93" containerID="0a99c9f872186930dc2ff26f77162bd11595bcc5751d9fca6858048d734722da" exitCode=0 Sep 29 16:59:59 crc kubenswrapper[4592]: I0929 16:59:59.754689 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-w748k" event={"ID":"d4b30d14-aea8-4482-8220-81cf36dc8a93","Type":"ContainerDied","Data":"0a99c9f872186930dc2ff26f77162bd11595bcc5751d9fca6858048d734722da"} Sep 29 16:59:59 crc kubenswrapper[4592]: I0929 16:59:59.995697 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.079436 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d4b30d14-aea8-4482-8220-81cf36dc8a93-ca-trust-extracted\") pod \"d4b30d14-aea8-4482-8220-81cf36dc8a93\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.079516 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-registry-tls\") pod \"d4b30d14-aea8-4482-8220-81cf36dc8a93\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.079547 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d4b30d14-aea8-4482-8220-81cf36dc8a93-registry-certificates\") pod \"d4b30d14-aea8-4482-8220-81cf36dc8a93\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.079596 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4b30d14-aea8-4482-8220-81cf36dc8a93-trusted-ca\") pod \"d4b30d14-aea8-4482-8220-81cf36dc8a93\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.079741 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"d4b30d14-aea8-4482-8220-81cf36dc8a93\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.079781 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7z6bs\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-kube-api-access-7z6bs\") pod \"d4b30d14-aea8-4482-8220-81cf36dc8a93\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.079819 4592 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-bound-sa-token\") pod \"d4b30d14-aea8-4482-8220-81cf36dc8a93\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.079885 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d4b30d14-aea8-4482-8220-81cf36dc8a93-installation-pull-secrets\") pod \"d4b30d14-aea8-4482-8220-81cf36dc8a93\" (UID: \"d4b30d14-aea8-4482-8220-81cf36dc8a93\") " Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.081078 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4b30d14-aea8-4482-8220-81cf36dc8a93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "d4b30d14-aea8-4482-8220-81cf36dc8a93" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.081590 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4b30d14-aea8-4482-8220-81cf36dc8a93-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "d4b30d14-aea8-4482-8220-81cf36dc8a93" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.087870 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "d4b30d14-aea8-4482-8220-81cf36dc8a93" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.088124 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "d4b30d14-aea8-4482-8220-81cf36dc8a93" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.088488 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-kube-api-access-7z6bs" (OuterVolumeSpecName: "kube-api-access-7z6bs") pod "d4b30d14-aea8-4482-8220-81cf36dc8a93" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93"). InnerVolumeSpecName "kube-api-access-7z6bs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.095572 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "d4b30d14-aea8-4482-8220-81cf36dc8a93" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.095743 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4b30d14-aea8-4482-8220-81cf36dc8a93-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "d4b30d14-aea8-4482-8220-81cf36dc8a93" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.100890 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4b30d14-aea8-4482-8220-81cf36dc8a93-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "d4b30d14-aea8-4482-8220-81cf36dc8a93" (UID: "d4b30d14-aea8-4482-8220-81cf36dc8a93"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.143941 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76"] Sep 29 17:00:00 crc kubenswrapper[4592]: E0929 17:00:00.144448 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4b30d14-aea8-4482-8220-81cf36dc8a93" containerName="registry" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.144470 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4b30d14-aea8-4482-8220-81cf36dc8a93" containerName="registry" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.144559 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4b30d14-aea8-4482-8220-81cf36dc8a93" containerName="registry" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.145018 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.147081 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.147522 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.150066 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76"] Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.180629 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33250f9a-7065-4cae-a125-33fe961c1ef4-config-volume\") pod \"collect-profiles-29319420-79l76\" (UID: \"33250f9a-7065-4cae-a125-33fe961c1ef4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.180873 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffcf4\" (UniqueName: \"kubernetes.io/projected/33250f9a-7065-4cae-a125-33fe961c1ef4-kube-api-access-ffcf4\") pod \"collect-profiles-29319420-79l76\" (UID: \"33250f9a-7065-4cae-a125-33fe961c1ef4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.180990 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33250f9a-7065-4cae-a125-33fe961c1ef4-secret-volume\") pod \"collect-profiles-29319420-79l76\" (UID: \"33250f9a-7065-4cae-a125-33fe961c1ef4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.181093 4592 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d4b30d14-aea8-4482-8220-81cf36dc8a93-trusted-ca\") on node \"crc\" DevicePath \"\"" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.181173 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7z6bs\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-kube-api-access-7z6bs\") on node \"crc\" DevicePath \"\"" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.181251 4592 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.181301 4592 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d4b30d14-aea8-4482-8220-81cf36dc8a93-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.181347 4592 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d4b30d14-aea8-4482-8220-81cf36dc8a93-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.181403 4592 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/d4b30d14-aea8-4482-8220-81cf36dc8a93-registry-tls\") on node \"crc\" DevicePath \"\"" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.181450 4592 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d4b30d14-aea8-4482-8220-81cf36dc8a93-registry-certificates\") on node \"crc\" DevicePath \"\"" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.281986 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33250f9a-7065-4cae-a125-33fe961c1ef4-config-volume\") pod \"collect-profiles-29319420-79l76\" (UID: \"33250f9a-7065-4cae-a125-33fe961c1ef4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.282311 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffcf4\" (UniqueName: \"kubernetes.io/projected/33250f9a-7065-4cae-a125-33fe961c1ef4-kube-api-access-ffcf4\") pod \"collect-profiles-29319420-79l76\" (UID: \"33250f9a-7065-4cae-a125-33fe961c1ef4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.282437 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33250f9a-7065-4cae-a125-33fe961c1ef4-secret-volume\") pod \"collect-profiles-29319420-79l76\" (UID: \"33250f9a-7065-4cae-a125-33fe961c1ef4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.283064 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33250f9a-7065-4cae-a125-33fe961c1ef4-config-volume\") pod \"collect-profiles-29319420-79l76\" (UID: \"33250f9a-7065-4cae-a125-33fe961c1ef4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.286188 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33250f9a-7065-4cae-a125-33fe961c1ef4-secret-volume\") pod \"collect-profiles-29319420-79l76\" (UID: \"33250f9a-7065-4cae-a125-33fe961c1ef4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.297828 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffcf4\" (UniqueName: \"kubernetes.io/projected/33250f9a-7065-4cae-a125-33fe961c1ef4-kube-api-access-ffcf4\") pod \"collect-profiles-29319420-79l76\" (UID: \"33250f9a-7065-4cae-a125-33fe961c1ef4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.468129 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.640877 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76"] Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.762654 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-w748k" event={"ID":"d4b30d14-aea8-4482-8220-81cf36dc8a93","Type":"ContainerDied","Data":"ab8fc55153f851e07765574d27781a5e2cd6619fbf638692d7b1273d1d30869a"} Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.763052 4592 scope.go:117] "RemoveContainer" containerID="0a99c9f872186930dc2ff26f77162bd11595bcc5751d9fca6858048d734722da" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.762690 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-w748k" Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.764703 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" event={"ID":"33250f9a-7065-4cae-a125-33fe961c1ef4","Type":"ContainerStarted","Data":"4c4fd684f36d65e150e1c28565985405e028b3c3d3f4184e303d949b86ca2101"} Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.794643 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w748k"] Sep 29 17:00:00 crc kubenswrapper[4592]: I0929 17:00:00.798621 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w748k"] Sep 29 17:00:01 crc kubenswrapper[4592]: I0929 17:00:01.190439 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4b30d14-aea8-4482-8220-81cf36dc8a93" path="/var/lib/kubelet/pods/d4b30d14-aea8-4482-8220-81cf36dc8a93/volumes" Sep 29 17:00:01 crc kubenswrapper[4592]: I0929 17:00:01.772723 4592 generic.go:334] "Generic (PLEG): container finished" podID="33250f9a-7065-4cae-a125-33fe961c1ef4" containerID="b08b033c873a22417dff1442f96911218963743d32b7fdaeb51aba404769677d" exitCode=0 Sep 29 17:00:01 crc kubenswrapper[4592]: I0929 17:00:01.772772 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" event={"ID":"33250f9a-7065-4cae-a125-33fe961c1ef4","Type":"ContainerDied","Data":"b08b033c873a22417dff1442f96911218963743d32b7fdaeb51aba404769677d"} Sep 29 17:00:02 crc kubenswrapper[4592]: I0929 17:00:02.965574 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:00:03 crc kubenswrapper[4592]: I0929 17:00:03.122220 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33250f9a-7065-4cae-a125-33fe961c1ef4-secret-volume\") pod \"33250f9a-7065-4cae-a125-33fe961c1ef4\" (UID: \"33250f9a-7065-4cae-a125-33fe961c1ef4\") " Sep 29 17:00:03 crc kubenswrapper[4592]: I0929 17:00:03.122563 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33250f9a-7065-4cae-a125-33fe961c1ef4-config-volume\") pod \"33250f9a-7065-4cae-a125-33fe961c1ef4\" (UID: \"33250f9a-7065-4cae-a125-33fe961c1ef4\") " Sep 29 17:00:03 crc kubenswrapper[4592]: I0929 17:00:03.122599 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffcf4\" (UniqueName: \"kubernetes.io/projected/33250f9a-7065-4cae-a125-33fe961c1ef4-kube-api-access-ffcf4\") pod \"33250f9a-7065-4cae-a125-33fe961c1ef4\" (UID: \"33250f9a-7065-4cae-a125-33fe961c1ef4\") " Sep 29 17:00:03 crc kubenswrapper[4592]: I0929 17:00:03.123608 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33250f9a-7065-4cae-a125-33fe961c1ef4-config-volume" (OuterVolumeSpecName: "config-volume") pod "33250f9a-7065-4cae-a125-33fe961c1ef4" (UID: "33250f9a-7065-4cae-a125-33fe961c1ef4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:00:03 crc kubenswrapper[4592]: I0929 17:00:03.127579 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33250f9a-7065-4cae-a125-33fe961c1ef4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "33250f9a-7065-4cae-a125-33fe961c1ef4" (UID: "33250f9a-7065-4cae-a125-33fe961c1ef4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:00:03 crc kubenswrapper[4592]: I0929 17:00:03.127594 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33250f9a-7065-4cae-a125-33fe961c1ef4-kube-api-access-ffcf4" (OuterVolumeSpecName: "kube-api-access-ffcf4") pod "33250f9a-7065-4cae-a125-33fe961c1ef4" (UID: "33250f9a-7065-4cae-a125-33fe961c1ef4"). InnerVolumeSpecName "kube-api-access-ffcf4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:00:03 crc kubenswrapper[4592]: I0929 17:00:03.224596 4592 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33250f9a-7065-4cae-a125-33fe961c1ef4-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 17:00:03 crc kubenswrapper[4592]: I0929 17:00:03.224641 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffcf4\" (UniqueName: \"kubernetes.io/projected/33250f9a-7065-4cae-a125-33fe961c1ef4-kube-api-access-ffcf4\") on node \"crc\" DevicePath \"\"" Sep 29 17:00:03 crc kubenswrapper[4592]: I0929 17:00:03.224656 4592 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33250f9a-7065-4cae-a125-33fe961c1ef4-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 17:00:03 crc kubenswrapper[4592]: I0929 17:00:03.783693 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" event={"ID":"33250f9a-7065-4cae-a125-33fe961c1ef4","Type":"ContainerDied","Data":"4c4fd684f36d65e150e1c28565985405e028b3c3d3f4184e303d949b86ca2101"} Sep 29 17:00:03 crc kubenswrapper[4592]: I0929 17:00:03.783984 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c4fd684f36d65e150e1c28565985405e028b3c3d3f4184e303d949b86ca2101" Sep 29 17:00:03 crc kubenswrapper[4592]: I0929 17:00:03.783823 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76" Sep 29 17:01:00 crc kubenswrapper[4592]: I0929 17:01:00.883686 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:01:00 crc kubenswrapper[4592]: I0929 17:01:00.884245 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:01:30 crc kubenswrapper[4592]: I0929 17:01:30.883482 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:01:30 crc kubenswrapper[4592]: I0929 17:01:30.883852 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:02:00 crc kubenswrapper[4592]: I0929 17:02:00.883350 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:02:00 crc kubenswrapper[4592]: I0929 
17:02:00.883785 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:02:00 crc kubenswrapper[4592]: I0929 17:02:00.883827 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 17:02:00 crc kubenswrapper[4592]: I0929 17:02:00.884348 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7ed991bd0dfceb7b79c3a45affb7bd94e2ef89c514442956cd38fa47619e5780"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 17:02:00 crc kubenswrapper[4592]: I0929 17:02:00.884402 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://7ed991bd0dfceb7b79c3a45affb7bd94e2ef89c514442956cd38fa47619e5780" gracePeriod=600 Sep 29 17:02:01 crc kubenswrapper[4592]: I0929 17:02:01.385453 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="7ed991bd0dfceb7b79c3a45affb7bd94e2ef89c514442956cd38fa47619e5780" exitCode=0 Sep 29 17:02:01 crc kubenswrapper[4592]: I0929 17:02:01.385516 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"7ed991bd0dfceb7b79c3a45affb7bd94e2ef89c514442956cd38fa47619e5780"} Sep 29 17:02:01 crc kubenswrapper[4592]: I0929 17:02:01.385620 4592 scope.go:117] "RemoveContainer" containerID="148831675bbb8aed327c76ca2e7313d94cd39b81a906464672542ffb204027b7" Sep 29 17:02:02 crc kubenswrapper[4592]: I0929 17:02:02.391931 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"c27ed29df7ad1d8fe01e00a1b4d9831c1ed68234be9201ca2428b03bb210eaae"} Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.710537 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-6phr8"] Sep 29 17:02:22 crc kubenswrapper[4592]: E0929 17:02:22.712064 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33250f9a-7065-4cae-a125-33fe961c1ef4" containerName="collect-profiles" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.712082 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="33250f9a-7065-4cae-a125-33fe961c1ef4" containerName="collect-profiles" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.712226 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="33250f9a-7065-4cae-a125-33fe961c1ef4" containerName="collect-profiles" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.712650 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-6phr8" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.716536 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-wnhfl"] Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.717305 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-wnhfl" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.726008 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-6phr8"] Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.737377 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.737449 4592 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-hcdhx" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.737803 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.737920 4592 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-d8ljf" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.741718 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-wnhfl"] Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.746343 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-vdxkr"] Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.747146 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-vdxkr" Sep 29 17:02:22 crc kubenswrapper[4592]: W0929 17:02:22.752599 4592 reflector.go:561] object-"cert-manager"/"cert-manager-webhook-dockercfg-sdj7z": failed to list *v1.Secret: secrets "cert-manager-webhook-dockercfg-sdj7z" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "cert-manager": no relationship found between node 'crc' and this object Sep 29 17:02:22 crc kubenswrapper[4592]: E0929 17:02:22.752642 4592 reflector.go:158] "Unhandled Error" err="object-\"cert-manager\"/\"cert-manager-webhook-dockercfg-sdj7z\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"cert-manager-webhook-dockercfg-sdj7z\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"cert-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.772528 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-vdxkr"] Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.860278 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k92zz\" (UniqueName: \"kubernetes.io/projected/eb6c1a73-f740-4b79-ab2a-ccf80a36deb5-kube-api-access-k92zz\") pod \"cert-manager-webhook-5655c58dd6-vdxkr\" (UID: \"eb6c1a73-f740-4b79-ab2a-ccf80a36deb5\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-vdxkr" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.860355 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsxn2\" 
(UniqueName: \"kubernetes.io/projected/26c87439-d01b-405b-9567-f2c2c83283e1-kube-api-access-bsxn2\") pod \"cert-manager-cainjector-7f985d654d-6phr8\" (UID: \"26c87439-d01b-405b-9567-f2c2c83283e1\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-6phr8" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.860388 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wb74p\" (UniqueName: \"kubernetes.io/projected/0080b650-be9f-452d-8c10-69ae3480edf2-kube-api-access-wb74p\") pod \"cert-manager-5b446d88c5-wnhfl\" (UID: \"0080b650-be9f-452d-8c10-69ae3480edf2\") " pod="cert-manager/cert-manager-5b446d88c5-wnhfl" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.960999 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k92zz\" (UniqueName: \"kubernetes.io/projected/eb6c1a73-f740-4b79-ab2a-ccf80a36deb5-kube-api-access-k92zz\") pod \"cert-manager-webhook-5655c58dd6-vdxkr\" (UID: \"eb6c1a73-f740-4b79-ab2a-ccf80a36deb5\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-vdxkr" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.961073 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsxn2\" (UniqueName: \"kubernetes.io/projected/26c87439-d01b-405b-9567-f2c2c83283e1-kube-api-access-bsxn2\") pod \"cert-manager-cainjector-7f985d654d-6phr8\" (UID: \"26c87439-d01b-405b-9567-f2c2c83283e1\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-6phr8" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.961104 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wb74p\" (UniqueName: \"kubernetes.io/projected/0080b650-be9f-452d-8c10-69ae3480edf2-kube-api-access-wb74p\") pod \"cert-manager-5b446d88c5-wnhfl\" (UID: \"0080b650-be9f-452d-8c10-69ae3480edf2\") " pod="cert-manager/cert-manager-5b446d88c5-wnhfl" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.980350 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wb74p\" (UniqueName: \"kubernetes.io/projected/0080b650-be9f-452d-8c10-69ae3480edf2-kube-api-access-wb74p\") pod \"cert-manager-5b446d88c5-wnhfl\" (UID: \"0080b650-be9f-452d-8c10-69ae3480edf2\") " pod="cert-manager/cert-manager-5b446d88c5-wnhfl" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.981877 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k92zz\" (UniqueName: \"kubernetes.io/projected/eb6c1a73-f740-4b79-ab2a-ccf80a36deb5-kube-api-access-k92zz\") pod \"cert-manager-webhook-5655c58dd6-vdxkr\" (UID: \"eb6c1a73-f740-4b79-ab2a-ccf80a36deb5\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-vdxkr" Sep 29 17:02:22 crc kubenswrapper[4592]: I0929 17:02:22.983285 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsxn2\" (UniqueName: \"kubernetes.io/projected/26c87439-d01b-405b-9567-f2c2c83283e1-kube-api-access-bsxn2\") pod \"cert-manager-cainjector-7f985d654d-6phr8\" (UID: \"26c87439-d01b-405b-9567-f2c2c83283e1\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-6phr8" Sep 29 17:02:23 crc kubenswrapper[4592]: I0929 17:02:23.028255 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-6phr8" Sep 29 17:02:23 crc kubenswrapper[4592]: I0929 17:02:23.034183 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-wnhfl" Sep 29 17:02:23 crc kubenswrapper[4592]: I0929 17:02:23.245307 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-wnhfl"] Sep 29 17:02:23 crc kubenswrapper[4592]: I0929 17:02:23.260205 4592 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 17:02:23 crc kubenswrapper[4592]: I0929 17:02:23.309574 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-6phr8"] Sep 29 17:02:23 crc kubenswrapper[4592]: I0929 17:02:23.489565 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-wnhfl" event={"ID":"0080b650-be9f-452d-8c10-69ae3480edf2","Type":"ContainerStarted","Data":"baeec216354b33ea4a0308e12c37b3e40a263f525c0095bc16d9ff0943d3a370"} Sep 29 17:02:23 crc kubenswrapper[4592]: I0929 17:02:23.490615 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-6phr8" event={"ID":"26c87439-d01b-405b-9567-f2c2c83283e1","Type":"ContainerStarted","Data":"fddffce0cd01eaeaa4282869eebf51e288508105f415240a12d27ebb9625d489"} Sep 29 17:02:23 crc kubenswrapper[4592]: I0929 17:02:23.860804 4592 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-sdj7z" Sep 29 17:02:23 crc kubenswrapper[4592]: I0929 17:02:23.870474 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-vdxkr" Sep 29 17:02:24 crc kubenswrapper[4592]: I0929 17:02:24.098846 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-vdxkr"] Sep 29 17:02:24 crc kubenswrapper[4592]: I0929 17:02:24.496868 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-vdxkr" event={"ID":"eb6c1a73-f740-4b79-ab2a-ccf80a36deb5","Type":"ContainerStarted","Data":"4570d9548ac3eb4b1e69700900e05f920462a234bc388cb830c008799f46d7cc"} Sep 29 17:02:26 crc kubenswrapper[4592]: I0929 17:02:26.507841 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-6phr8" event={"ID":"26c87439-d01b-405b-9567-f2c2c83283e1","Type":"ContainerStarted","Data":"16e3d82b7f272c40eed519b5efed3596b6f89b4722537ab1c7c525752df7e722"} Sep 29 17:02:26 crc kubenswrapper[4592]: I0929 17:02:26.509263 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-wnhfl" event={"ID":"0080b650-be9f-452d-8c10-69ae3480edf2","Type":"ContainerStarted","Data":"0e26e638d34f745eb98ecfd6e089cb1de3b700bec559d97d6149c9c7788c347f"} Sep 29 17:02:26 crc kubenswrapper[4592]: I0929 17:02:26.526977 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-6phr8" podStartSLOduration=2.014270747 podStartE2EDuration="4.526944907s" podCreationTimestamp="2025-09-29 17:02:22 +0000 UTC" firstStartedPulling="2025-09-29 17:02:23.300380878 +0000 UTC m=+673.448158549" lastFinishedPulling="2025-09-29 17:02:25.813055028 +0000 UTC m=+675.960832709" observedRunningTime="2025-09-29 17:02:26.525903266 +0000 UTC m=+676.673680947" watchObservedRunningTime="2025-09-29 17:02:26.526944907 +0000 UTC m=+676.674722588" Sep 29 17:02:26 crc kubenswrapper[4592]: I0929 17:02:26.548813 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="cert-manager/cert-manager-5b446d88c5-wnhfl" podStartSLOduration=1.997102886 podStartE2EDuration="4.548791044s" podCreationTimestamp="2025-09-29 17:02:22 +0000 UTC" firstStartedPulling="2025-09-29 17:02:23.259942637 +0000 UTC m=+673.407720318" lastFinishedPulling="2025-09-29 17:02:25.811630795 +0000 UTC m=+675.959408476" observedRunningTime="2025-09-29 17:02:26.544584122 +0000 UTC m=+676.692361803" watchObservedRunningTime="2025-09-29 17:02:26.548791044 +0000 UTC m=+676.696568725" Sep 29 17:02:27 crc kubenswrapper[4592]: I0929 17:02:27.516698 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-vdxkr" event={"ID":"eb6c1a73-f740-4b79-ab2a-ccf80a36deb5","Type":"ContainerStarted","Data":"be5d21fbd23417d2d9cc4d1b3b0aa8d9d5eebe2d8e8730c9f84fdcaa0c64b3c5"} Sep 29 17:02:27 crc kubenswrapper[4592]: I0929 17:02:27.532231 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-vdxkr" podStartSLOduration=2.84234362 podStartE2EDuration="5.532214162s" podCreationTimestamp="2025-09-29 17:02:22 +0000 UTC" firstStartedPulling="2025-09-29 17:02:24.343179189 +0000 UTC m=+674.490956870" lastFinishedPulling="2025-09-29 17:02:27.033049731 +0000 UTC m=+677.180827412" observedRunningTime="2025-09-29 17:02:27.531914234 +0000 UTC m=+677.679691925" watchObservedRunningTime="2025-09-29 17:02:27.532214162 +0000 UTC m=+677.679991843" Sep 29 17:02:28 crc kubenswrapper[4592]: I0929 17:02:28.521275 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-vdxkr" Sep 29 17:02:33 crc kubenswrapper[4592]: I0929 17:02:33.874205 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-vdxkr" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.256390 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-47pt5"] Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.257297 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovn-controller" containerID="cri-o://ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6" gracePeriod=30 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.257371 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="sbdb" containerID="cri-o://55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d" gracePeriod=30 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.257326 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="nbdb" containerID="cri-o://e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d" gracePeriod=30 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.257429 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="kube-rbac-proxy-node" containerID="cri-o://833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b" gracePeriod=30 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.257430 4592 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovn-acl-logging" containerID="cri-o://8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5" gracePeriod=30 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.257460 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78" gracePeriod=30 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.257461 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="northd" containerID="cri-o://865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6" gracePeriod=30 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.293522 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" containerID="cri-o://36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96" gracePeriod=30 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.554614 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbbtb_2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89/kube-multus/2.log" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.555050 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbbtb_2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89/kube-multus/1.log" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.555098 4592 generic.go:334] "Generic (PLEG): container finished" podID="2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89" containerID="1ae5ae3cccc0d89a1a3c86a0ae3425f225ca17e767eff18c0e270245b182897a" exitCode=2 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.555173 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbbtb" event={"ID":"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89","Type":"ContainerDied","Data":"1ae5ae3cccc0d89a1a3c86a0ae3425f225ca17e767eff18c0e270245b182897a"} Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.555227 4592 scope.go:117] "RemoveContainer" containerID="635cb0ee8fce00562ab053c3fcc72d78f2b15220f7fbd4f3cd76cca15d5435be" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.555872 4592 scope.go:117] "RemoveContainer" containerID="1ae5ae3cccc0d89a1a3c86a0ae3425f225ca17e767eff18c0e270245b182897a" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.556253 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-gbbtb_openshift-multus(2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89)\"" pod="openshift-multus/multus-gbbtb" podUID="2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.557958 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovnkube-controller/3.log" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.559730 4592 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovn-acl-logging/0.log" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560137 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovn-controller/0.log" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560460 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96" exitCode=0 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560480 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d" exitCode=0 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560489 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78" exitCode=0 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560497 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b" exitCode=0 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560504 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5" exitCode=143 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560510 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6" exitCode=143 Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560530 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96"} Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560554 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d"} Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560565 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78"} Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560573 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b"} Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560583 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5"} Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.560591 4592 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6"} Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.607516 4592 scope.go:117] "RemoveContainer" containerID="a2c8a32c73f9da92aa34adb412a119c81c2834a03c6d626672db010a3d2bff64" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.614532 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovn-acl-logging/0.log" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.615229 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovn-controller/0.log" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.615562 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671202 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rjq9v"] Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.671498 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671517 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.671532 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="kube-rbac-proxy-node" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671539 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="kube-rbac-proxy-node" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.671550 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="kubecfg-setup" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671578 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="kubecfg-setup" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.671586 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="northd" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671593 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="northd" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.671606 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671616 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.671626 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="sbdb" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671632 4592 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="sbdb" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.671663 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671670 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.671687 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="nbdb" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671694 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="nbdb" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.671704 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovn-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671711 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovn-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.671739 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671747 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.671756 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovn-acl-logging" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671763 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovn-acl-logging" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671901 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671912 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="kube-rbac-proxy-node" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671921 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="nbdb" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671929 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671936 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="northd" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671945 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="sbdb" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671974 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovn-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671986 4592 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovn-acl-logging" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.671993 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="kube-rbac-proxy-ovn-metrics" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.672002 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.672189 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.672200 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: E0929 17:02:35.672210 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.672218 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.672370 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.672388 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" containerName="ovnkube-controller" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.674809 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.713704 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c2fe7eab-4398-4a63-8f2d-124e93afad84-ovnkube-script-lib\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.713769 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-kubelet\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.713787 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-slash\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.713810 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-cni-netd\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.713851 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-run-netns\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.713874 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-run-ovn\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.713899 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.713922 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-var-lib-openvswitch\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.713942 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsgkm\" (UniqueName: 
\"kubernetes.io/projected/c2fe7eab-4398-4a63-8f2d-124e93afad84-kube-api-access-zsgkm\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.713983 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-run-ovn-kubernetes\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.714025 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-node-log\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.714048 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-cni-bin\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.714062 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c2fe7eab-4398-4a63-8f2d-124e93afad84-ovnkube-config\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.714079 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-run-systemd\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.714115 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c2fe7eab-4398-4a63-8f2d-124e93afad84-ovn-node-metrics-cert\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.714143 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-log-socket\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.714208 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-systemd-units\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.714228 4592 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-run-openvswitch\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.714316 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c2fe7eab-4398-4a63-8f2d-124e93afad84-env-overrides\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.714350 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-etc-openvswitch\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.814679 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-node-log\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.814727 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-ovn\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.814771 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-env-overrides\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.814810 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-ovnkube-script-lib\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.814834 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-cni-bin\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.814872 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-systemd-units\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.814903 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-run-netns\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: 
\"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.814913 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.814936 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-openvswitch\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.814952 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.814964 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.814960 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-ovnkube-config\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815004 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-etc-openvswitch\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815036 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95m58\" (UniqueName: \"kubernetes.io/projected/b22efd65-426d-4220-9e18-5a84827be8ac-kube-api-access-95m58\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815060 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-cni-netd\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815078 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-log-socket\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815099 4592 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-kubelet\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815116 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-slash\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815135 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-var-lib-openvswitch\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815181 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-var-lib-cni-networks-ovn-kubernetes\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815208 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b22efd65-426d-4220-9e18-5a84827be8ac-ovn-node-metrics-cert\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815230 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-systemd\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815254 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-run-ovn-kubernetes\") pod \"b22efd65-426d-4220-9e18-5a84827be8ac\" (UID: \"b22efd65-426d-4220-9e18-5a84827be8ac\") " Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815251 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815277 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815313 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-slash" (OuterVolumeSpecName: "host-slash") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815336 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815355 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-log-socket" (OuterVolumeSpecName: "log-socket") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815348 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815371 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815394 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815417 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815419 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815439 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815447 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.815451 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816099 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-node-log" (OuterVolumeSpecName: "node-log") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816169 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-cni-bin\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816196 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-run-systemd\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816214 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c2fe7eab-4398-4a63-8f2d-124e93afad84-ovnkube-config\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816238 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c2fe7eab-4398-4a63-8f2d-124e93afad84-ovn-node-metrics-cert\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816243 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-cni-bin\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816289 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-log-socket\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816262 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-log-socket\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816259 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-run-systemd\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816370 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-systemd-units\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816404 4592 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-run-openvswitch\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816460 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/c2fe7eab-4398-4a63-8f2d-124e93afad84-env-overrides\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816488 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-etc-openvswitch\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816541 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c2fe7eab-4398-4a63-8f2d-124e93afad84-ovnkube-script-lib\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816588 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-kubelet\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816621 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-slash\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816656 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-cni-netd\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816680 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-run-netns\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816700 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-run-ovn\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816725 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816749 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-var-lib-openvswitch\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816756 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-run-openvswitch\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816779 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsgkm\" (UniqueName: \"kubernetes.io/projected/c2fe7eab-4398-4a63-8f2d-124e93afad84-kube-api-access-zsgkm\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816808 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-run-ovn-kubernetes\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816834 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-cni-netd\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816849 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-node-log\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816919 4592 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816935 4592 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-env-overrides\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816948 4592 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816949 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/c2fe7eab-4398-4a63-8f2d-124e93afad84-env-overrides\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816961 4592 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-cni-bin\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816973 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/c2fe7eab-4398-4a63-8f2d-124e93afad84-ovnkube-config\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816988 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-kubelet\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816994 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-node-log\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817017 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-slash\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816810 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-systemd-units\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.816952 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-etc-openvswitch\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817049 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-var-lib-openvswitch\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817068 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-run-ovn\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817090 4592 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-run-netns\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817097 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-run-ovn-kubernetes\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817126 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/c2fe7eab-4398-4a63-8f2d-124e93afad84-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817179 4592 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-systemd-units\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817196 4592 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-run-netns\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817224 4592 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817236 4592 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/b22efd65-426d-4220-9e18-5a84827be8ac-ovnkube-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817248 4592 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817258 4592 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-cni-netd\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817269 4592 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-log-socket\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817279 4592 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-kubelet\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817289 4592 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-slash\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 
17:02:35.817300 4592 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817312 4592 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817324 4592 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817338 4592 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-node-log\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.817414 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/c2fe7eab-4398-4a63-8f2d-124e93afad84-ovnkube-script-lib\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.820529 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b22efd65-426d-4220-9e18-5a84827be8ac-kube-api-access-95m58" (OuterVolumeSpecName: "kube-api-access-95m58") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "kube-api-access-95m58". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.820790 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/c2fe7eab-4398-4a63-8f2d-124e93afad84-ovn-node-metrics-cert\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.821278 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b22efd65-426d-4220-9e18-5a84827be8ac-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.833169 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "b22efd65-426d-4220-9e18-5a84827be8ac" (UID: "b22efd65-426d-4220-9e18-5a84827be8ac"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.835135 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsgkm\" (UniqueName: \"kubernetes.io/projected/c2fe7eab-4398-4a63-8f2d-124e93afad84-kube-api-access-zsgkm\") pod \"ovnkube-node-rjq9v\" (UID: \"c2fe7eab-4398-4a63-8f2d-124e93afad84\") " pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.918642 4592 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/b22efd65-426d-4220-9e18-5a84827be8ac-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.918694 4592 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/b22efd65-426d-4220-9e18-5a84827be8ac-run-systemd\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.918707 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95m58\" (UniqueName: \"kubernetes.io/projected/b22efd65-426d-4220-9e18-5a84827be8ac-kube-api-access-95m58\") on node \"crc\" DevicePath \"\"" Sep 29 17:02:35 crc kubenswrapper[4592]: I0929 17:02:35.988919 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:36 crc kubenswrapper[4592]: W0929 17:02:36.014744 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2fe7eab_4398_4a63_8f2d_124e93afad84.slice/crio-664908fa3d8952b4353677c14cfedea76e4c732ec7902467b02ee274af645f69 WatchSource:0}: Error finding container 664908fa3d8952b4353677c14cfedea76e4c732ec7902467b02ee274af645f69: Status 404 returned error can't find the container with id 664908fa3d8952b4353677c14cfedea76e4c732ec7902467b02ee274af645f69 Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.569237 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovn-acl-logging/0.log" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.571166 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-47pt5_b22efd65-426d-4220-9e18-5a84827be8ac/ovn-controller/0.log" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.571695 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d" exitCode=0 Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.571728 4592 generic.go:334] "Generic (PLEG): container finished" podID="b22efd65-426d-4220-9e18-5a84827be8ac" containerID="865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6" exitCode=0 Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.571742 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d"} Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.571769 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" 
event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6"} Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.571779 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" event={"ID":"b22efd65-426d-4220-9e18-5a84827be8ac","Type":"ContainerDied","Data":"53ac2a49c8b45aa9d478914f5bdb6d9587677e64fe398059924ee465ea3e7972"} Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.571796 4592 scope.go:117] "RemoveContainer" containerID="36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.571819 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-47pt5" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.578778 4592 generic.go:334] "Generic (PLEG): container finished" podID="c2fe7eab-4398-4a63-8f2d-124e93afad84" containerID="d67ebf8e986ce7240696c17f5480a69faac03052faeeb05b32a47ecb2b4a3664" exitCode=0 Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.578838 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" event={"ID":"c2fe7eab-4398-4a63-8f2d-124e93afad84","Type":"ContainerDied","Data":"d67ebf8e986ce7240696c17f5480a69faac03052faeeb05b32a47ecb2b4a3664"} Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.578867 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" event={"ID":"c2fe7eab-4398-4a63-8f2d-124e93afad84","Type":"ContainerStarted","Data":"664908fa3d8952b4353677c14cfedea76e4c732ec7902467b02ee274af645f69"} Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.582754 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbbtb_2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89/kube-multus/2.log" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.604051 4592 scope.go:117] "RemoveContainer" containerID="55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.641220 4592 scope.go:117] "RemoveContainer" containerID="e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.667751 4592 scope.go:117] "RemoveContainer" containerID="865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.676398 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-47pt5"] Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.684448 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-47pt5"] Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.686701 4592 scope.go:117] "RemoveContainer" containerID="0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.700196 4592 scope.go:117] "RemoveContainer" containerID="833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.716292 4592 scope.go:117] "RemoveContainer" containerID="8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.738837 4592 scope.go:117] "RemoveContainer" containerID="ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6" Sep 29 17:02:36 
crc kubenswrapper[4592]: I0929 17:02:36.763259 4592 scope.go:117] "RemoveContainer" containerID="75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.783619 4592 scope.go:117] "RemoveContainer" containerID="36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96" Sep 29 17:02:36 crc kubenswrapper[4592]: E0929 17:02:36.784304 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96\": container with ID starting with 36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96 not found: ID does not exist" containerID="36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.784332 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96"} err="failed to get container status \"36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96\": rpc error: code = NotFound desc = could not find container \"36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96\": container with ID starting with 36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96 not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.784352 4592 scope.go:117] "RemoveContainer" containerID="55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d" Sep 29 17:02:36 crc kubenswrapper[4592]: E0929 17:02:36.784619 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\": container with ID starting with 55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d not found: ID does not exist" containerID="55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.784662 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d"} err="failed to get container status \"55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\": rpc error: code = NotFound desc = could not find container \"55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\": container with ID starting with 55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.784689 4592 scope.go:117] "RemoveContainer" containerID="e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d" Sep 29 17:02:36 crc kubenswrapper[4592]: E0929 17:02:36.784909 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\": container with ID starting with e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d not found: ID does not exist" containerID="e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.784933 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d"} err="failed to get container status 
\"e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\": rpc error: code = NotFound desc = could not find container \"e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\": container with ID starting with e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.784971 4592 scope.go:117] "RemoveContainer" containerID="865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6" Sep 29 17:02:36 crc kubenswrapper[4592]: E0929 17:02:36.785193 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\": container with ID starting with 865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6 not found: ID does not exist" containerID="865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.785214 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6"} err="failed to get container status \"865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\": rpc error: code = NotFound desc = could not find container \"865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\": container with ID starting with 865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6 not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.785227 4592 scope.go:117] "RemoveContainer" containerID="0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78" Sep 29 17:02:36 crc kubenswrapper[4592]: E0929 17:02:36.785418 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\": container with ID starting with 0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78 not found: ID does not exist" containerID="0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.785444 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78"} err="failed to get container status \"0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\": rpc error: code = NotFound desc = could not find container \"0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\": container with ID starting with 0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78 not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.785458 4592 scope.go:117] "RemoveContainer" containerID="833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b" Sep 29 17:02:36 crc kubenswrapper[4592]: E0929 17:02:36.785616 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\": container with ID starting with 833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b not found: ID does not exist" containerID="833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.785637 4592 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b"} err="failed to get container status \"833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\": rpc error: code = NotFound desc = could not find container \"833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\": container with ID starting with 833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.785652 4592 scope.go:117] "RemoveContainer" containerID="8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5" Sep 29 17:02:36 crc kubenswrapper[4592]: E0929 17:02:36.785809 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\": container with ID starting with 8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5 not found: ID does not exist" containerID="8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.785831 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5"} err="failed to get container status \"8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\": rpc error: code = NotFound desc = could not find container \"8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\": container with ID starting with 8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5 not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.785845 4592 scope.go:117] "RemoveContainer" containerID="ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6" Sep 29 17:02:36 crc kubenswrapper[4592]: E0929 17:02:36.786004 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\": container with ID starting with ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6 not found: ID does not exist" containerID="ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.786025 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6"} err="failed to get container status \"ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\": rpc error: code = NotFound desc = could not find container \"ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\": container with ID starting with ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6 not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.786038 4592 scope.go:117] "RemoveContainer" containerID="75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22" Sep 29 17:02:36 crc kubenswrapper[4592]: E0929 17:02:36.786234 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\": container with ID starting with 75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22 not found: ID does not exist" 
containerID="75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.786256 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22"} err="failed to get container status \"75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\": rpc error: code = NotFound desc = could not find container \"75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\": container with ID starting with 75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22 not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.786268 4592 scope.go:117] "RemoveContainer" containerID="36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.786452 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96"} err="failed to get container status \"36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96\": rpc error: code = NotFound desc = could not find container \"36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96\": container with ID starting with 36429284b265031550eaa5c3ebab79f392f767f27fc441a93e1bb04e684c6c96 not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.786470 4592 scope.go:117] "RemoveContainer" containerID="55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.786622 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d"} err="failed to get container status \"55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\": rpc error: code = NotFound desc = could not find container \"55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d\": container with ID starting with 55fc80cd2bd70c89f9aeb10a265f5e59e0bdb98125be77145cd713777250830d not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.786641 4592 scope.go:117] "RemoveContainer" containerID="e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.786792 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d"} err="failed to get container status \"e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\": rpc error: code = NotFound desc = could not find container \"e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d\": container with ID starting with e67a5f04a25238ba95b27d31eeda9110a135b01e8ec841c69fb50b08e531bc3d not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.786811 4592 scope.go:117] "RemoveContainer" containerID="865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.786956 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6"} err="failed to get container status \"865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\": rpc error: code = NotFound desc = could not find 
container \"865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6\": container with ID starting with 865241e80622b0a57cd5a30d382047068b9192fc5c4eb4701b2e7569f07d84f6 not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.786974 4592 scope.go:117] "RemoveContainer" containerID="0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.787116 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78"} err="failed to get container status \"0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\": rpc error: code = NotFound desc = could not find container \"0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78\": container with ID starting with 0cd9effe85ef23e366d9527b52030aaf933bf4a48bafc25dd7eb8f5be9f36a78 not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.787136 4592 scope.go:117] "RemoveContainer" containerID="833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.787352 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b"} err="failed to get container status \"833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\": rpc error: code = NotFound desc = could not find container \"833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b\": container with ID starting with 833a693b64569bea6b2f7a597fd72b2f7a1b03e4672ab23291492ccf5b3d4e5b not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.787371 4592 scope.go:117] "RemoveContainer" containerID="8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.787515 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5"} err="failed to get container status \"8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\": rpc error: code = NotFound desc = could not find container \"8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5\": container with ID starting with 8c204481ff80d80ed507dd093dfdba82e5d89bf29efab174d56085cf6a24cae5 not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.787534 4592 scope.go:117] "RemoveContainer" containerID="ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.787681 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6"} err="failed to get container status \"ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\": rpc error: code = NotFound desc = could not find container \"ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6\": container with ID starting with ac5f86bf0348722cdb3132965ae0c453fc93c2733af96ab5ff3afa188adc01f6 not found: ID does not exist" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.787700 4592 scope.go:117] "RemoveContainer" containerID="75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22" Sep 29 17:02:36 crc kubenswrapper[4592]: I0929 17:02:36.787847 4592 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22"} err="failed to get container status \"75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\": rpc error: code = NotFound desc = could not find container \"75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22\": container with ID starting with 75cb9ba28cbbe92e7c6be3cc6e4350b5394df0be8896ff79dd1b6eb123fd0b22 not found: ID does not exist" Sep 29 17:02:37 crc kubenswrapper[4592]: I0929 17:02:37.189250 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b22efd65-426d-4220-9e18-5a84827be8ac" path="/var/lib/kubelet/pods/b22efd65-426d-4220-9e18-5a84827be8ac/volumes" Sep 29 17:02:37 crc kubenswrapper[4592]: I0929 17:02:37.591797 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" event={"ID":"c2fe7eab-4398-4a63-8f2d-124e93afad84","Type":"ContainerStarted","Data":"6af5771aec210d838c858a9ef13cd4470da7d3d850a98b466bb4a90ccebc05aa"} Sep 29 17:02:37 crc kubenswrapper[4592]: I0929 17:02:37.591845 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" event={"ID":"c2fe7eab-4398-4a63-8f2d-124e93afad84","Type":"ContainerStarted","Data":"b345a3475862490a2b9e45fb1543d6767abbdd26f926e63556304a7b1364f0ce"} Sep 29 17:02:37 crc kubenswrapper[4592]: I0929 17:02:37.591861 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" event={"ID":"c2fe7eab-4398-4a63-8f2d-124e93afad84","Type":"ContainerStarted","Data":"06413143c2332d0c67f2f0ba31eeefeb7bd6f5c2aa451e5317f0f8ef56cdf3bb"} Sep 29 17:02:37 crc kubenswrapper[4592]: I0929 17:02:37.591874 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" event={"ID":"c2fe7eab-4398-4a63-8f2d-124e93afad84","Type":"ContainerStarted","Data":"f90b95e4e8660231598575b0c0c24ee10c36349f0b21c7e9deb1e9c4d362e78f"} Sep 29 17:02:37 crc kubenswrapper[4592]: I0929 17:02:37.591886 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" event={"ID":"c2fe7eab-4398-4a63-8f2d-124e93afad84","Type":"ContainerStarted","Data":"7c82e31b8f9c2c87aa0efd3957e46647b429850bf4152c84acddf5075d187d46"} Sep 29 17:02:37 crc kubenswrapper[4592]: I0929 17:02:37.591895 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" event={"ID":"c2fe7eab-4398-4a63-8f2d-124e93afad84","Type":"ContainerStarted","Data":"9647664150f376c8c7aba2adfd7fbe59da179d5ea686686551f2e2991e00b3a1"} Sep 29 17:02:39 crc kubenswrapper[4592]: I0929 17:02:39.613607 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" event={"ID":"c2fe7eab-4398-4a63-8f2d-124e93afad84","Type":"ContainerStarted","Data":"e407dc7364445c0c41195165b5a3e7eb4cd07741da6e393fdfd630d0329aa202"} Sep 29 17:02:42 crc kubenswrapper[4592]: I0929 17:02:42.633381 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" event={"ID":"c2fe7eab-4398-4a63-8f2d-124e93afad84","Type":"ContainerStarted","Data":"b6c9981f16706bd3b633f33ece14091a81df46b11a5dffc0f3b1bfc8ba3392e0"} Sep 29 17:02:42 crc kubenswrapper[4592]: I0929 17:02:42.634837 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:42 crc 
kubenswrapper[4592]: I0929 17:02:42.634985 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:42 crc kubenswrapper[4592]: I0929 17:02:42.635090 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:42 crc kubenswrapper[4592]: I0929 17:02:42.662321 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:42 crc kubenswrapper[4592]: I0929 17:02:42.686337 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:02:42 crc kubenswrapper[4592]: I0929 17:02:42.700956 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" podStartSLOduration=7.700934069 podStartE2EDuration="7.700934069s" podCreationTimestamp="2025-09-29 17:02:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:02:42.671339755 +0000 UTC m=+692.819117436" watchObservedRunningTime="2025-09-29 17:02:42.700934069 +0000 UTC m=+692.848711750" Sep 29 17:02:49 crc kubenswrapper[4592]: I0929 17:02:49.183472 4592 scope.go:117] "RemoveContainer" containerID="1ae5ae3cccc0d89a1a3c86a0ae3425f225ca17e767eff18c0e270245b182897a" Sep 29 17:02:49 crc kubenswrapper[4592]: E0929 17:02:49.184566 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-gbbtb_openshift-multus(2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89)\"" pod="openshift-multus/multus-gbbtb" podUID="2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89" Sep 29 17:03:00 crc kubenswrapper[4592]: I0929 17:03:00.183099 4592 scope.go:117] "RemoveContainer" containerID="1ae5ae3cccc0d89a1a3c86a0ae3425f225ca17e767eff18c0e270245b182897a" Sep 29 17:03:00 crc kubenswrapper[4592]: I0929 17:03:00.725626 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-gbbtb_2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89/kube-multus/2.log" Sep 29 17:03:00 crc kubenswrapper[4592]: I0929 17:03:00.725925 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-gbbtb" event={"ID":"2c56ff1e-8dc0-43d1-bdf7-9eb71ffc5c89","Type":"ContainerStarted","Data":"81fc02b8d11f10da9f27e129ed2548730fab745b1b86e0cba8557d08ac2626fa"} Sep 29 17:03:06 crc kubenswrapper[4592]: I0929 17:03:06.016943 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rjq9v" Sep 29 17:03:11 crc kubenswrapper[4592]: I0929 17:03:11.961724 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb"] Sep 29 17:03:11 crc kubenswrapper[4592]: I0929 17:03:11.963465 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:11 crc kubenswrapper[4592]: I0929 17:03:11.965898 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Sep 29 17:03:11 crc kubenswrapper[4592]: I0929 17:03:11.969646 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb"] Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.090280 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9pk7\" (UniqueName: \"kubernetes.io/projected/9368ddaf-962a-4262-91ad-5febcadc8dbf-kube-api-access-l9pk7\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb\" (UID: \"9368ddaf-962a-4262-91ad-5febcadc8dbf\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.090336 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9368ddaf-962a-4262-91ad-5febcadc8dbf-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb\" (UID: \"9368ddaf-962a-4262-91ad-5febcadc8dbf\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.090369 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9368ddaf-962a-4262-91ad-5febcadc8dbf-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb\" (UID: \"9368ddaf-962a-4262-91ad-5febcadc8dbf\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.191485 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9pk7\" (UniqueName: \"kubernetes.io/projected/9368ddaf-962a-4262-91ad-5febcadc8dbf-kube-api-access-l9pk7\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb\" (UID: \"9368ddaf-962a-4262-91ad-5febcadc8dbf\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.191535 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9368ddaf-962a-4262-91ad-5febcadc8dbf-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb\" (UID: \"9368ddaf-962a-4262-91ad-5febcadc8dbf\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.191570 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9368ddaf-962a-4262-91ad-5febcadc8dbf-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb\" (UID: \"9368ddaf-962a-4262-91ad-5febcadc8dbf\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.192000 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/9368ddaf-962a-4262-91ad-5febcadc8dbf-util\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb\" (UID: \"9368ddaf-962a-4262-91ad-5febcadc8dbf\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.192047 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9368ddaf-962a-4262-91ad-5febcadc8dbf-bundle\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb\" (UID: \"9368ddaf-962a-4262-91ad-5febcadc8dbf\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.225981 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9pk7\" (UniqueName: \"kubernetes.io/projected/9368ddaf-962a-4262-91ad-5febcadc8dbf-kube-api-access-l9pk7\") pod \"9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb\" (UID: \"9368ddaf-962a-4262-91ad-5febcadc8dbf\") " pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.276374 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.459571 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb"] Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.794206 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" event={"ID":"9368ddaf-962a-4262-91ad-5febcadc8dbf","Type":"ContainerStarted","Data":"abacd649ca24a2e35991160672ffdd9174ff8a9ed392c2dbfb6300e7b53b06dd"} Sep 29 17:03:12 crc kubenswrapper[4592]: I0929 17:03:12.794605 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" event={"ID":"9368ddaf-962a-4262-91ad-5febcadc8dbf","Type":"ContainerStarted","Data":"78c8f033d0590fb67d99435fe78ce2c4eb4e51d4a52177579d03c38fb6bca1c6"} Sep 29 17:03:13 crc kubenswrapper[4592]: I0929 17:03:13.801240 4592 generic.go:334] "Generic (PLEG): container finished" podID="9368ddaf-962a-4262-91ad-5febcadc8dbf" containerID="abacd649ca24a2e35991160672ffdd9174ff8a9ed392c2dbfb6300e7b53b06dd" exitCode=0 Sep 29 17:03:13 crc kubenswrapper[4592]: I0929 17:03:13.801289 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" event={"ID":"9368ddaf-962a-4262-91ad-5febcadc8dbf","Type":"ContainerDied","Data":"abacd649ca24a2e35991160672ffdd9174ff8a9ed392c2dbfb6300e7b53b06dd"} Sep 29 17:03:15 crc kubenswrapper[4592]: I0929 17:03:15.814650 4592 generic.go:334] "Generic (PLEG): container finished" podID="9368ddaf-962a-4262-91ad-5febcadc8dbf" containerID="f83b19ff1be373afacd8f5aa51a60909af5ad152bdc0f265ab074c7c0e320bfa" exitCode=0 Sep 29 17:03:15 crc kubenswrapper[4592]: I0929 17:03:15.814946 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" 
event={"ID":"9368ddaf-962a-4262-91ad-5febcadc8dbf","Type":"ContainerDied","Data":"f83b19ff1be373afacd8f5aa51a60909af5ad152bdc0f265ab074c7c0e320bfa"} Sep 29 17:03:16 crc kubenswrapper[4592]: I0929 17:03:16.823115 4592 generic.go:334] "Generic (PLEG): container finished" podID="9368ddaf-962a-4262-91ad-5febcadc8dbf" containerID="8b0f99b3f389b439b1b3fdc2fd393d98ff883fb7cb1157a2aa94f664bb91627b" exitCode=0 Sep 29 17:03:16 crc kubenswrapper[4592]: I0929 17:03:16.823236 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" event={"ID":"9368ddaf-962a-4262-91ad-5febcadc8dbf","Type":"ContainerDied","Data":"8b0f99b3f389b439b1b3fdc2fd393d98ff883fb7cb1157a2aa94f664bb91627b"} Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.043783 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.164673 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9368ddaf-962a-4262-91ad-5febcadc8dbf-bundle\") pod \"9368ddaf-962a-4262-91ad-5febcadc8dbf\" (UID: \"9368ddaf-962a-4262-91ad-5febcadc8dbf\") " Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.164754 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9pk7\" (UniqueName: \"kubernetes.io/projected/9368ddaf-962a-4262-91ad-5febcadc8dbf-kube-api-access-l9pk7\") pod \"9368ddaf-962a-4262-91ad-5febcadc8dbf\" (UID: \"9368ddaf-962a-4262-91ad-5febcadc8dbf\") " Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.164819 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9368ddaf-962a-4262-91ad-5febcadc8dbf-util\") pod \"9368ddaf-962a-4262-91ad-5febcadc8dbf\" (UID: \"9368ddaf-962a-4262-91ad-5febcadc8dbf\") " Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.165473 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9368ddaf-962a-4262-91ad-5febcadc8dbf-bundle" (OuterVolumeSpecName: "bundle") pod "9368ddaf-962a-4262-91ad-5febcadc8dbf" (UID: "9368ddaf-962a-4262-91ad-5febcadc8dbf"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.173371 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9368ddaf-962a-4262-91ad-5febcadc8dbf-kube-api-access-l9pk7" (OuterVolumeSpecName: "kube-api-access-l9pk7") pod "9368ddaf-962a-4262-91ad-5febcadc8dbf" (UID: "9368ddaf-962a-4262-91ad-5febcadc8dbf"). InnerVolumeSpecName "kube-api-access-l9pk7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.266495 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9pk7\" (UniqueName: \"kubernetes.io/projected/9368ddaf-962a-4262-91ad-5febcadc8dbf-kube-api-access-l9pk7\") on node \"crc\" DevicePath \"\"" Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.266526 4592 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9368ddaf-962a-4262-91ad-5febcadc8dbf-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.297139 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9368ddaf-962a-4262-91ad-5febcadc8dbf-util" (OuterVolumeSpecName: "util") pod "9368ddaf-962a-4262-91ad-5febcadc8dbf" (UID: "9368ddaf-962a-4262-91ad-5febcadc8dbf"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.367686 4592 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9368ddaf-962a-4262-91ad-5febcadc8dbf-util\") on node \"crc\" DevicePath \"\"" Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.836379 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" event={"ID":"9368ddaf-962a-4262-91ad-5febcadc8dbf","Type":"ContainerDied","Data":"78c8f033d0590fb67d99435fe78ce2c4eb4e51d4a52177579d03c38fb6bca1c6"} Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.836682 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78c8f033d0590fb67d99435fe78ce2c4eb4e51d4a52177579d03c38fb6bca1c6" Sep 29 17:03:18 crc kubenswrapper[4592]: I0929 17:03:18.836425 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb" Sep 29 17:03:19 crc kubenswrapper[4592]: I0929 17:03:19.987891 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-x6d4v"] Sep 29 17:03:19 crc kubenswrapper[4592]: E0929 17:03:19.988164 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9368ddaf-962a-4262-91ad-5febcadc8dbf" containerName="pull" Sep 29 17:03:19 crc kubenswrapper[4592]: I0929 17:03:19.988179 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9368ddaf-962a-4262-91ad-5febcadc8dbf" containerName="pull" Sep 29 17:03:19 crc kubenswrapper[4592]: E0929 17:03:19.988191 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9368ddaf-962a-4262-91ad-5febcadc8dbf" containerName="extract" Sep 29 17:03:19 crc kubenswrapper[4592]: I0929 17:03:19.988199 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9368ddaf-962a-4262-91ad-5febcadc8dbf" containerName="extract" Sep 29 17:03:19 crc kubenswrapper[4592]: E0929 17:03:19.988211 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9368ddaf-962a-4262-91ad-5febcadc8dbf" containerName="util" Sep 29 17:03:19 crc kubenswrapper[4592]: I0929 17:03:19.988218 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9368ddaf-962a-4262-91ad-5febcadc8dbf" containerName="util" Sep 29 17:03:19 crc kubenswrapper[4592]: I0929 17:03:19.988346 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="9368ddaf-962a-4262-91ad-5febcadc8dbf" containerName="extract" Sep 29 17:03:19 crc kubenswrapper[4592]: I0929 17:03:19.988779 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-x6d4v" Sep 29 17:03:19 crc kubenswrapper[4592]: I0929 17:03:19.993426 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Sep 29 17:03:19 crc kubenswrapper[4592]: I0929 17:03:19.993455 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-hm4rv" Sep 29 17:03:19 crc kubenswrapper[4592]: I0929 17:03:19.993633 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Sep 29 17:03:20 crc kubenswrapper[4592]: I0929 17:03:20.004496 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-x6d4v"] Sep 29 17:03:20 crc kubenswrapper[4592]: I0929 17:03:20.086964 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hqp7\" (UniqueName: \"kubernetes.io/projected/e9ff4f23-0699-427f-86b3-275b408c261a-kube-api-access-7hqp7\") pod \"nmstate-operator-5d6f6cfd66-x6d4v\" (UID: \"e9ff4f23-0699-427f-86b3-275b408c261a\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-x6d4v" Sep 29 17:03:20 crc kubenswrapper[4592]: I0929 17:03:20.188089 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hqp7\" (UniqueName: \"kubernetes.io/projected/e9ff4f23-0699-427f-86b3-275b408c261a-kube-api-access-7hqp7\") pod \"nmstate-operator-5d6f6cfd66-x6d4v\" (UID: \"e9ff4f23-0699-427f-86b3-275b408c261a\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-x6d4v" Sep 29 17:03:20 crc kubenswrapper[4592]: I0929 17:03:20.204947 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hqp7\" 
(UniqueName: \"kubernetes.io/projected/e9ff4f23-0699-427f-86b3-275b408c261a-kube-api-access-7hqp7\") pod \"nmstate-operator-5d6f6cfd66-x6d4v\" (UID: \"e9ff4f23-0699-427f-86b3-275b408c261a\") " pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-x6d4v" Sep 29 17:03:20 crc kubenswrapper[4592]: I0929 17:03:20.302007 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-x6d4v" Sep 29 17:03:20 crc kubenswrapper[4592]: I0929 17:03:20.502643 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5d6f6cfd66-x6d4v"] Sep 29 17:03:20 crc kubenswrapper[4592]: I0929 17:03:20.868277 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-x6d4v" event={"ID":"e9ff4f23-0699-427f-86b3-275b408c261a","Type":"ContainerStarted","Data":"066e8c9b367e09e2ff47da1cb34b1513af490a146e0922a64cfebaec23f68285"} Sep 29 17:03:23 crc kubenswrapper[4592]: I0929 17:03:23.886138 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-x6d4v" event={"ID":"e9ff4f23-0699-427f-86b3-275b408c261a","Type":"ContainerStarted","Data":"1ceb908eadffc8cd17068b2aa4bd06435d3f2ce1eae03148b03372ae1e6e6586"} Sep 29 17:03:23 crc kubenswrapper[4592]: I0929 17:03:23.908294 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5d6f6cfd66-x6d4v" podStartSLOduration=2.263905953 podStartE2EDuration="4.908274659s" podCreationTimestamp="2025-09-29 17:03:19 +0000 UTC" firstStartedPulling="2025-09-29 17:03:20.515692239 +0000 UTC m=+730.663469920" lastFinishedPulling="2025-09-29 17:03:23.160060945 +0000 UTC m=+733.307838626" observedRunningTime="2025-09-29 17:03:23.902379119 +0000 UTC m=+734.050156800" watchObservedRunningTime="2025-09-29 17:03:23.908274659 +0000 UTC m=+734.056052340" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.825253 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-5lhdp"] Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.826070 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-5lhdp" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.828235 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-8m9dq" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.843307 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-5lhdp"] Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.846119 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq"] Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.846789 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.850769 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.851286 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-zgcpn"] Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.852102 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.869721 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq"] Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.975639 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/252fdf6a-56d5-473a-b492-e9b94bc89d19-ovs-socket\") pod \"nmstate-handler-zgcpn\" (UID: \"252fdf6a-56d5-473a-b492-e9b94bc89d19\") " pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.975703 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/252fdf6a-56d5-473a-b492-e9b94bc89d19-nmstate-lock\") pod \"nmstate-handler-zgcpn\" (UID: \"252fdf6a-56d5-473a-b492-e9b94bc89d19\") " pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.975802 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8nwg\" (UniqueName: \"kubernetes.io/projected/c9a4f2e3-e2ba-460b-92b2-a7cfda566c50-kube-api-access-q8nwg\") pod \"nmstate-webhook-6d689559c5-cfxmq\" (UID: \"c9a4f2e3-e2ba-460b-92b2-a7cfda566c50\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.975855 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c9a4f2e3-e2ba-460b-92b2-a7cfda566c50-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-cfxmq\" (UID: \"c9a4f2e3-e2ba-460b-92b2-a7cfda566c50\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.975883 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/252fdf6a-56d5-473a-b492-e9b94bc89d19-dbus-socket\") pod \"nmstate-handler-zgcpn\" (UID: \"252fdf6a-56d5-473a-b492-e9b94bc89d19\") " pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.975912 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnpdx\" (UniqueName: \"kubernetes.io/projected/15e33da6-1266-4757-ab8b-bcbd435b8d26-kube-api-access-qnpdx\") pod \"nmstate-metrics-58fcddf996-5lhdp\" (UID: \"15e33da6-1266-4757-ab8b-bcbd435b8d26\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-5lhdp" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.975946 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42p89\" (UniqueName: \"kubernetes.io/projected/252fdf6a-56d5-473a-b492-e9b94bc89d19-kube-api-access-42p89\") pod \"nmstate-handler-zgcpn\" (UID: \"252fdf6a-56d5-473a-b492-e9b94bc89d19\") " pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.981731 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k"] Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.982377 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.984035 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-9f7rp" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.984602 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.995026 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k"] Sep 29 17:03:24 crc kubenswrapper[4592]: I0929 17:03:24.998201 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.077522 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ks9zz\" (UniqueName: \"kubernetes.io/projected/f5019cbd-3156-4d20-9c40-163965b4ca0b-kube-api-access-ks9zz\") pod \"nmstate-console-plugin-864bb6dfb5-94s8k\" (UID: \"f5019cbd-3156-4d20-9c40-163965b4ca0b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.077571 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42p89\" (UniqueName: \"kubernetes.io/projected/252fdf6a-56d5-473a-b492-e9b94bc89d19-kube-api-access-42p89\") pod \"nmstate-handler-zgcpn\" (UID: \"252fdf6a-56d5-473a-b492-e9b94bc89d19\") " pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.077670 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/252fdf6a-56d5-473a-b492-e9b94bc89d19-ovs-socket\") pod \"nmstate-handler-zgcpn\" (UID: \"252fdf6a-56d5-473a-b492-e9b94bc89d19\") " pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.077714 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/252fdf6a-56d5-473a-b492-e9b94bc89d19-nmstate-lock\") pod \"nmstate-handler-zgcpn\" (UID: \"252fdf6a-56d5-473a-b492-e9b94bc89d19\") " pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.077750 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5019cbd-3156-4d20-9c40-163965b4ca0b-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-94s8k\" (UID: \"f5019cbd-3156-4d20-9c40-163965b4ca0b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.077776 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f5019cbd-3156-4d20-9c40-163965b4ca0b-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-94s8k\" (UID: \"f5019cbd-3156-4d20-9c40-163965b4ca0b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.077794 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/252fdf6a-56d5-473a-b492-e9b94bc89d19-ovs-socket\") pod \"nmstate-handler-zgcpn\" (UID: 
\"252fdf6a-56d5-473a-b492-e9b94bc89d19\") " pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.077854 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/252fdf6a-56d5-473a-b492-e9b94bc89d19-nmstate-lock\") pod \"nmstate-handler-zgcpn\" (UID: \"252fdf6a-56d5-473a-b492-e9b94bc89d19\") " pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.077880 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8nwg\" (UniqueName: \"kubernetes.io/projected/c9a4f2e3-e2ba-460b-92b2-a7cfda566c50-kube-api-access-q8nwg\") pod \"nmstate-webhook-6d689559c5-cfxmq\" (UID: \"c9a4f2e3-e2ba-460b-92b2-a7cfda566c50\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.078176 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c9a4f2e3-e2ba-460b-92b2-a7cfda566c50-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-cfxmq\" (UID: \"c9a4f2e3-e2ba-460b-92b2-a7cfda566c50\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.078269 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/252fdf6a-56d5-473a-b492-e9b94bc89d19-dbus-socket\") pod \"nmstate-handler-zgcpn\" (UID: \"252fdf6a-56d5-473a-b492-e9b94bc89d19\") " pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.078304 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnpdx\" (UniqueName: \"kubernetes.io/projected/15e33da6-1266-4757-ab8b-bcbd435b8d26-kube-api-access-qnpdx\") pod \"nmstate-metrics-58fcddf996-5lhdp\" (UID: \"15e33da6-1266-4757-ab8b-bcbd435b8d26\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-5lhdp" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.078567 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/252fdf6a-56d5-473a-b492-e9b94bc89d19-dbus-socket\") pod \"nmstate-handler-zgcpn\" (UID: \"252fdf6a-56d5-473a-b492-e9b94bc89d19\") " pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.087086 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c9a4f2e3-e2ba-460b-92b2-a7cfda566c50-tls-key-pair\") pod \"nmstate-webhook-6d689559c5-cfxmq\" (UID: \"c9a4f2e3-e2ba-460b-92b2-a7cfda566c50\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.097475 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42p89\" (UniqueName: \"kubernetes.io/projected/252fdf6a-56d5-473a-b492-e9b94bc89d19-kube-api-access-42p89\") pod \"nmstate-handler-zgcpn\" (UID: \"252fdf6a-56d5-473a-b492-e9b94bc89d19\") " pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.102294 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8nwg\" (UniqueName: \"kubernetes.io/projected/c9a4f2e3-e2ba-460b-92b2-a7cfda566c50-kube-api-access-q8nwg\") pod \"nmstate-webhook-6d689559c5-cfxmq\" (UID: 
\"c9a4f2e3-e2ba-460b-92b2-a7cfda566c50\") " pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.115743 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnpdx\" (UniqueName: \"kubernetes.io/projected/15e33da6-1266-4757-ab8b-bcbd435b8d26-kube-api-access-qnpdx\") pod \"nmstate-metrics-58fcddf996-5lhdp\" (UID: \"15e33da6-1266-4757-ab8b-bcbd435b8d26\") " pod="openshift-nmstate/nmstate-metrics-58fcddf996-5lhdp" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.138675 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-58fcddf996-5lhdp" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.174469 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.179135 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5019cbd-3156-4d20-9c40-163965b4ca0b-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-94s8k\" (UID: \"f5019cbd-3156-4d20-9c40-163965b4ca0b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.179219 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f5019cbd-3156-4d20-9c40-163965b4ca0b-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-94s8k\" (UID: \"f5019cbd-3156-4d20-9c40-163965b4ca0b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.179286 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ks9zz\" (UniqueName: \"kubernetes.io/projected/f5019cbd-3156-4d20-9c40-163965b4ca0b-kube-api-access-ks9zz\") pod \"nmstate-console-plugin-864bb6dfb5-94s8k\" (UID: \"f5019cbd-3156-4d20-9c40-163965b4ca0b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.180412 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/f5019cbd-3156-4d20-9c40-163965b4ca0b-nginx-conf\") pod \"nmstate-console-plugin-864bb6dfb5-94s8k\" (UID: \"f5019cbd-3156-4d20-9c40-163965b4ca0b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.182925 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.192128 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/f5019cbd-3156-4d20-9c40-163965b4ca0b-plugin-serving-cert\") pod \"nmstate-console-plugin-864bb6dfb5-94s8k\" (UID: \"f5019cbd-3156-4d20-9c40-163965b4ca0b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.210956 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ks9zz\" (UniqueName: \"kubernetes.io/projected/f5019cbd-3156-4d20-9c40-163965b4ca0b-kube-api-access-ks9zz\") pod \"nmstate-console-plugin-864bb6dfb5-94s8k\" (UID: \"f5019cbd-3156-4d20-9c40-163965b4ca0b\") " pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.218050 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-bcf45554c-zthnl"] Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.218736 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.279928 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ef723855-bf21-4874-ad1b-159938056b3c-console-oauth-config\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.280270 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef723855-bf21-4874-ad1b-159938056b3c-console-serving-cert\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.280302 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6k5dp\" (UniqueName: \"kubernetes.io/projected/ef723855-bf21-4874-ad1b-159938056b3c-kube-api-access-6k5dp\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.280359 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ef723855-bf21-4874-ad1b-159938056b3c-service-ca\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.280387 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ef723855-bf21-4874-ad1b-159938056b3c-trusted-ca-bundle\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.280405 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/ef723855-bf21-4874-ad1b-159938056b3c-oauth-serving-cert\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.280447 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ef723855-bf21-4874-ad1b-159938056b3c-console-config\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.284950 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-bcf45554c-zthnl"] Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.297788 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.383708 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ef723855-bf21-4874-ad1b-159938056b3c-console-config\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.383754 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ef723855-bf21-4874-ad1b-159938056b3c-console-oauth-config\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.383789 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef723855-bf21-4874-ad1b-159938056b3c-console-serving-cert\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.383810 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6k5dp\" (UniqueName: \"kubernetes.io/projected/ef723855-bf21-4874-ad1b-159938056b3c-kube-api-access-6k5dp\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.383850 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ef723855-bf21-4874-ad1b-159938056b3c-service-ca\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.383868 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ef723855-bf21-4874-ad1b-159938056b3c-trusted-ca-bundle\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.383888 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/ef723855-bf21-4874-ad1b-159938056b3c-oauth-serving-cert\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.384704 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ef723855-bf21-4874-ad1b-159938056b3c-oauth-serving-cert\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.385130 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ef723855-bf21-4874-ad1b-159938056b3c-service-ca\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.385549 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ef723855-bf21-4874-ad1b-159938056b3c-trusted-ca-bundle\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.385957 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ef723855-bf21-4874-ad1b-159938056b3c-console-config\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.391040 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ef723855-bf21-4874-ad1b-159938056b3c-console-serving-cert\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.392610 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ef723855-bf21-4874-ad1b-159938056b3c-console-oauth-config\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.404868 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6k5dp\" (UniqueName: \"kubernetes.io/projected/ef723855-bf21-4874-ad1b-159938056b3c-kube-api-access-6k5dp\") pod \"console-bcf45554c-zthnl\" (UID: \"ef723855-bf21-4874-ad1b-159938056b3c\") " pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.521454 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq"] Sep 29 17:03:25 crc kubenswrapper[4592]: W0929 17:03:25.528230 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9a4f2e3_e2ba_460b_92b2_a7cfda566c50.slice/crio-368e3532ba7c408415d4f019a6ae24745c98477e912a9247e7072cd25f922d84 WatchSource:0}: Error finding container 368e3532ba7c408415d4f019a6ae24745c98477e912a9247e7072cd25f922d84: Status 404 returned error can't find the container with 
id 368e3532ba7c408415d4f019a6ae24745c98477e912a9247e7072cd25f922d84 Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.569899 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.571757 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k"] Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.581609 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-58fcddf996-5lhdp"] Sep 29 17:03:25 crc kubenswrapper[4592]: W0929 17:03:25.581721 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5019cbd_3156_4d20_9c40_163965b4ca0b.slice/crio-7863235590c554a6850593ce3fb385b28a4cd75b6db0ed717321f13b5ada8a37 WatchSource:0}: Error finding container 7863235590c554a6850593ce3fb385b28a4cd75b6db0ed717321f13b5ada8a37: Status 404 returned error can't find the container with id 7863235590c554a6850593ce3fb385b28a4cd75b6db0ed717321f13b5ada8a37 Sep 29 17:03:25 crc kubenswrapper[4592]: W0929 17:03:25.589268 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15e33da6_1266_4757_ab8b_bcbd435b8d26.slice/crio-6ee69badf6b6871ab6ff528a0636fd5a065aff4f6328fc1f3b2f1450061d0e79 WatchSource:0}: Error finding container 6ee69badf6b6871ab6ff528a0636fd5a065aff4f6328fc1f3b2f1450061d0e79: Status 404 returned error can't find the container with id 6ee69badf6b6871ab6ff528a0636fd5a065aff4f6328fc1f3b2f1450061d0e79 Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.797878 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-bcf45554c-zthnl"] Sep 29 17:03:25 crc kubenswrapper[4592]: W0929 17:03:25.799049 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef723855_bf21_4874_ad1b_159938056b3c.slice/crio-edcb139eab56b68b2bdfd67b2962e6e2767e3fb70b5e352bca658332cd5ba767 WatchSource:0}: Error finding container edcb139eab56b68b2bdfd67b2962e6e2767e3fb70b5e352bca658332cd5ba767: Status 404 returned error can't find the container with id edcb139eab56b68b2bdfd67b2962e6e2767e3fb70b5e352bca658332cd5ba767 Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.910772 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-bcf45554c-zthnl" event={"ID":"ef723855-bf21-4874-ad1b-159938056b3c","Type":"ContainerStarted","Data":"edcb139eab56b68b2bdfd67b2962e6e2767e3fb70b5e352bca658332cd5ba767"} Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.913930 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq" event={"ID":"c9a4f2e3-e2ba-460b-92b2-a7cfda566c50","Type":"ContainerStarted","Data":"368e3532ba7c408415d4f019a6ae24745c98477e912a9247e7072cd25f922d84"} Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.915608 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" event={"ID":"f5019cbd-3156-4d20-9c40-163965b4ca0b","Type":"ContainerStarted","Data":"7863235590c554a6850593ce3fb385b28a4cd75b6db0ed717321f13b5ada8a37"} Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.917365 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-zgcpn" 
event={"ID":"252fdf6a-56d5-473a-b492-e9b94bc89d19","Type":"ContainerStarted","Data":"f562b21ba1a4e3411c398469e8d3bd4dc3dc98f63b3dfcdcf359a1440889feae"} Sep 29 17:03:25 crc kubenswrapper[4592]: I0929 17:03:25.918630 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-5lhdp" event={"ID":"15e33da6-1266-4757-ab8b-bcbd435b8d26","Type":"ContainerStarted","Data":"6ee69badf6b6871ab6ff528a0636fd5a065aff4f6328fc1f3b2f1450061d0e79"} Sep 29 17:03:26 crc kubenswrapper[4592]: I0929 17:03:26.926374 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-bcf45554c-zthnl" event={"ID":"ef723855-bf21-4874-ad1b-159938056b3c","Type":"ContainerStarted","Data":"08f351e16112a0c944d760902a544370cc0b1f0effafb5d66812f9269a9fdfb4"} Sep 29 17:03:29 crc kubenswrapper[4592]: I0929 17:03:29.963461 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-zgcpn" event={"ID":"252fdf6a-56d5-473a-b492-e9b94bc89d19","Type":"ContainerStarted","Data":"0d0230be75e09e676197a0f37ea6e627ae76af516a0baa5984e644ed3072e348"} Sep 29 17:03:29 crc kubenswrapper[4592]: I0929 17:03:29.963938 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:29 crc kubenswrapper[4592]: I0929 17:03:29.965910 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-5lhdp" event={"ID":"15e33da6-1266-4757-ab8b-bcbd435b8d26","Type":"ContainerStarted","Data":"e0d99606026ee16c4aafc23c8208ede2905ea68c55d390d219bcf1a6a8b1bd10"} Sep 29 17:03:29 crc kubenswrapper[4592]: I0929 17:03:29.970390 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq" event={"ID":"c9a4f2e3-e2ba-460b-92b2-a7cfda566c50","Type":"ContainerStarted","Data":"9542944a3bbcb02dee9238d724e1489791a9ff4e632305363804483fa6f391a4"} Sep 29 17:03:29 crc kubenswrapper[4592]: I0929 17:03:29.970889 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq" Sep 29 17:03:29 crc kubenswrapper[4592]: I0929 17:03:29.972705 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" event={"ID":"f5019cbd-3156-4d20-9c40-163965b4ca0b","Type":"ContainerStarted","Data":"d7bb915d5f20f2c4863f90d4e872cee6abf0e3af40c8aaa130fd62f6c9c17c7c"} Sep 29 17:03:29 crc kubenswrapper[4592]: I0929 17:03:29.983651 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-bcf45554c-zthnl" podStartSLOduration=4.98363185 podStartE2EDuration="4.98363185s" podCreationTimestamp="2025-09-29 17:03:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:03:26.948738244 +0000 UTC m=+737.096515925" watchObservedRunningTime="2025-09-29 17:03:29.98363185 +0000 UTC m=+740.131409531" Sep 29 17:03:29 crc kubenswrapper[4592]: I0929 17:03:29.985299 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-zgcpn" podStartSLOduration=2.445922032 podStartE2EDuration="5.985292378s" podCreationTimestamp="2025-09-29 17:03:24 +0000 UTC" firstStartedPulling="2025-09-29 17:03:25.285080913 +0000 UTC m=+735.432858594" lastFinishedPulling="2025-09-29 17:03:28.824451259 +0000 UTC m=+738.972228940" observedRunningTime="2025-09-29 17:03:29.985269168 +0000 
UTC m=+740.133046869" watchObservedRunningTime="2025-09-29 17:03:29.985292378 +0000 UTC m=+740.133070059" Sep 29 17:03:29 crc kubenswrapper[4592]: I0929 17:03:29.997283 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-864bb6dfb5-94s8k" podStartSLOduration=2.7630929010000003 podStartE2EDuration="5.997265573s" podCreationTimestamp="2025-09-29 17:03:24 +0000 UTC" firstStartedPulling="2025-09-29 17:03:25.584334036 +0000 UTC m=+735.732111717" lastFinishedPulling="2025-09-29 17:03:28.818506698 +0000 UTC m=+738.966284389" observedRunningTime="2025-09-29 17:03:29.996670616 +0000 UTC m=+740.144448297" watchObservedRunningTime="2025-09-29 17:03:29.997265573 +0000 UTC m=+740.145043254" Sep 29 17:03:30 crc kubenswrapper[4592]: I0929 17:03:30.026014 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq" podStartSLOduration=2.719249551 podStartE2EDuration="6.0259802s" podCreationTimestamp="2025-09-29 17:03:24 +0000 UTC" firstStartedPulling="2025-09-29 17:03:25.530711673 +0000 UTC m=+735.678489354" lastFinishedPulling="2025-09-29 17:03:28.837442332 +0000 UTC m=+738.985220003" observedRunningTime="2025-09-29 17:03:30.023475328 +0000 UTC m=+740.171253009" watchObservedRunningTime="2025-09-29 17:03:30.0259802 +0000 UTC m=+740.173757881" Sep 29 17:03:32 crc kubenswrapper[4592]: I0929 17:03:32.988630 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-58fcddf996-5lhdp" event={"ID":"15e33da6-1266-4757-ab8b-bcbd435b8d26","Type":"ContainerStarted","Data":"8e3d3a5f856ce694fc5a82522a6857664343cb3d63bb7be9e1fbfa83d93f5d14"} Sep 29 17:03:33 crc kubenswrapper[4592]: I0929 17:03:33.006211 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-58fcddf996-5lhdp" podStartSLOduration=2.69565132 podStartE2EDuration="9.006193621s" podCreationTimestamp="2025-09-29 17:03:24 +0000 UTC" firstStartedPulling="2025-09-29 17:03:25.593088428 +0000 UTC m=+735.740866109" lastFinishedPulling="2025-09-29 17:03:31.903630729 +0000 UTC m=+742.051408410" observedRunningTime="2025-09-29 17:03:33.005612624 +0000 UTC m=+743.153390325" watchObservedRunningTime="2025-09-29 17:03:33.006193621 +0000 UTC m=+743.153971302" Sep 29 17:03:35 crc kubenswrapper[4592]: I0929 17:03:35.211164 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-zgcpn" Sep 29 17:03:35 crc kubenswrapper[4592]: I0929 17:03:35.570739 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:35 crc kubenswrapper[4592]: I0929 17:03:35.570783 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:35 crc kubenswrapper[4592]: I0929 17:03:35.576635 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:36 crc kubenswrapper[4592]: I0929 17:03:36.014203 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-bcf45554c-zthnl" Sep 29 17:03:36 crc kubenswrapper[4592]: I0929 17:03:36.093481 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-zn6hr"] Sep 29 17:03:39 crc kubenswrapper[4592]: I0929 17:03:39.505073 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-controller-manager/controller-manager-879f6c89f-g2gnz"] Sep 29 17:03:39 crc kubenswrapper[4592]: I0929 17:03:39.505864 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" podUID="107b2d55-7d06-4091-b57c-bcf7c3635060" containerName="controller-manager" containerID="cri-o://d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78" gracePeriod=30 Sep 29 17:03:39 crc kubenswrapper[4592]: I0929 17:03:39.649158 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"] Sep 29 17:03:39 crc kubenswrapper[4592]: I0929 17:03:39.649355 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" podUID="bb9789af-7be4-40cf-a9da-df45fa8522f7" containerName="route-controller-manager" containerID="cri-o://898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2" gracePeriod=30 Sep 29 17:03:39 crc kubenswrapper[4592]: I0929 17:03:39.964973 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.028947 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.030432 4592 generic.go:334] "Generic (PLEG): container finished" podID="107b2d55-7d06-4091-b57c-bcf7c3635060" containerID="d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78" exitCode=0 Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.030508 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.030506 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" event={"ID":"107b2d55-7d06-4091-b57c-bcf7c3635060","Type":"ContainerDied","Data":"d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78"} Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.030754 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-g2gnz" event={"ID":"107b2d55-7d06-4091-b57c-bcf7c3635060","Type":"ContainerDied","Data":"6ece491d53068e5d5d7eb3de6cf88e5c361060df58914262d90baabf6137858f"} Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.030778 4592 scope.go:117] "RemoveContainer" containerID="d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.033785 4592 generic.go:334] "Generic (PLEG): container finished" podID="bb9789af-7be4-40cf-a9da-df45fa8522f7" containerID="898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2" exitCode=0 Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.033822 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" event={"ID":"bb9789af-7be4-40cf-a9da-df45fa8522f7","Type":"ContainerDied","Data":"898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2"} Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.033846 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" event={"ID":"bb9789af-7be4-40cf-a9da-df45fa8522f7","Type":"ContainerDied","Data":"951f5c9751572196a1c5138272813ec6a4b6d9d7eefe52071970f219e6435e82"} Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.033900 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.058139 4592 scope.go:117] "RemoveContainer" containerID="d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78" Sep 29 17:03:40 crc kubenswrapper[4592]: E0929 17:03:40.058679 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78\": container with ID starting with d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78 not found: ID does not exist" containerID="d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.058743 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78"} err="failed to get container status \"d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78\": rpc error: code = NotFound desc = could not find container \"d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78\": container with ID starting with d2dfd17eb2c33073e9814678a86f91c43d6e9c8fb601bdd362c5f8e128ef2e78 not found: ID does not exist" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.058778 4592 scope.go:117] "RemoveContainer" containerID="898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.071260 4592 scope.go:117] "RemoveContainer" containerID="898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2" Sep 29 17:03:40 crc kubenswrapper[4592]: E0929 17:03:40.071696 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2\": container with ID starting with 898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2 not found: ID does not exist" containerID="898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.071804 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2"} err="failed to get container status \"898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2\": rpc error: code = NotFound desc = could not find container \"898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2\": container with ID starting with 898b1d4e1b2ce373e15c6daccbef4d3b5000457b1a8d05c37ab53556227653c2 not found: ID does not exist" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.075435 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-proxy-ca-bundles\") pod \"107b2d55-7d06-4091-b57c-bcf7c3635060\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.075514 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-config\") pod \"107b2d55-7d06-4091-b57c-bcf7c3635060\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.075624 4592 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-client-ca\") pod \"107b2d55-7d06-4091-b57c-bcf7c3635060\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.076455 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-client-ca" (OuterVolumeSpecName: "client-ca") pod "107b2d55-7d06-4091-b57c-bcf7c3635060" (UID: "107b2d55-7d06-4091-b57c-bcf7c3635060"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.076527 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "107b2d55-7d06-4091-b57c-bcf7c3635060" (UID: "107b2d55-7d06-4091-b57c-bcf7c3635060"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.076540 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-config" (OuterVolumeSpecName: "config") pod "107b2d55-7d06-4091-b57c-bcf7c3635060" (UID: "107b2d55-7d06-4091-b57c-bcf7c3635060"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.075729 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/107b2d55-7d06-4091-b57c-bcf7c3635060-serving-cert\") pod \"107b2d55-7d06-4091-b57c-bcf7c3635060\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.076712 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zrpz\" (UniqueName: \"kubernetes.io/projected/107b2d55-7d06-4091-b57c-bcf7c3635060-kube-api-access-8zrpz\") pod \"107b2d55-7d06-4091-b57c-bcf7c3635060\" (UID: \"107b2d55-7d06-4091-b57c-bcf7c3635060\") " Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.077018 4592 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.077042 4592 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.077052 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/107b2d55-7d06-4091-b57c-bcf7c3635060-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.082623 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/107b2d55-7d06-4091-b57c-bcf7c3635060-kube-api-access-8zrpz" (OuterVolumeSpecName: "kube-api-access-8zrpz") pod "107b2d55-7d06-4091-b57c-bcf7c3635060" (UID: "107b2d55-7d06-4091-b57c-bcf7c3635060"). InnerVolumeSpecName "kube-api-access-8zrpz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.083603 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/107b2d55-7d06-4091-b57c-bcf7c3635060-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "107b2d55-7d06-4091-b57c-bcf7c3635060" (UID: "107b2d55-7d06-4091-b57c-bcf7c3635060"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.177954 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb9789af-7be4-40cf-a9da-df45fa8522f7-config\") pod \"bb9789af-7be4-40cf-a9da-df45fa8522f7\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.178389 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bb9789af-7be4-40cf-a9da-df45fa8522f7-client-ca\") pod \"bb9789af-7be4-40cf-a9da-df45fa8522f7\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.178580 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb9789af-7be4-40cf-a9da-df45fa8522f7-serving-cert\") pod \"bb9789af-7be4-40cf-a9da-df45fa8522f7\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.178669 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qbzp\" (UniqueName: \"kubernetes.io/projected/bb9789af-7be4-40cf-a9da-df45fa8522f7-kube-api-access-7qbzp\") pod \"bb9789af-7be4-40cf-a9da-df45fa8522f7\" (UID: \"bb9789af-7be4-40cf-a9da-df45fa8522f7\") " Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.178865 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb9789af-7be4-40cf-a9da-df45fa8522f7-config" (OuterVolumeSpecName: "config") pod "bb9789af-7be4-40cf-a9da-df45fa8522f7" (UID: "bb9789af-7be4-40cf-a9da-df45fa8522f7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.178862 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb9789af-7be4-40cf-a9da-df45fa8522f7-client-ca" (OuterVolumeSpecName: "client-ca") pod "bb9789af-7be4-40cf-a9da-df45fa8522f7" (UID: "bb9789af-7be4-40cf-a9da-df45fa8522f7"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.179224 4592 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bb9789af-7be4-40cf-a9da-df45fa8522f7-client-ca\") on node \"crc\" DevicePath \"\"" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.179249 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/107b2d55-7d06-4091-b57c-bcf7c3635060-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.179262 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zrpz\" (UniqueName: \"kubernetes.io/projected/107b2d55-7d06-4091-b57c-bcf7c3635060-kube-api-access-8zrpz\") on node \"crc\" DevicePath \"\"" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.179276 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb9789af-7be4-40cf-a9da-df45fa8522f7-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.181578 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb9789af-7be4-40cf-a9da-df45fa8522f7-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bb9789af-7be4-40cf-a9da-df45fa8522f7" (UID: "bb9789af-7be4-40cf-a9da-df45fa8522f7"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.181596 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb9789af-7be4-40cf-a9da-df45fa8522f7-kube-api-access-7qbzp" (OuterVolumeSpecName: "kube-api-access-7qbzp") pod "bb9789af-7be4-40cf-a9da-df45fa8522f7" (UID: "bb9789af-7be4-40cf-a9da-df45fa8522f7"). InnerVolumeSpecName "kube-api-access-7qbzp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.280859 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qbzp\" (UniqueName: \"kubernetes.io/projected/bb9789af-7be4-40cf-a9da-df45fa8522f7-kube-api-access-7qbzp\") on node \"crc\" DevicePath \"\"" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.280927 4592 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb9789af-7be4-40cf-a9da-df45fa8522f7-serving-cert\") on node \"crc\" DevicePath \"\"" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.365962 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"] Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.371019 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-g478g"] Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.379802 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g2gnz"] Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.386168 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-g2gnz"] Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.756221 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6879dd76fd-fjgmq"] Sep 29 17:03:40 crc kubenswrapper[4592]: E0929 17:03:40.756571 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb9789af-7be4-40cf-a9da-df45fa8522f7" containerName="route-controller-manager" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.756591 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb9789af-7be4-40cf-a9da-df45fa8522f7" containerName="route-controller-manager" Sep 29 17:03:40 crc kubenswrapper[4592]: E0929 17:03:40.756622 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107b2d55-7d06-4091-b57c-bcf7c3635060" containerName="controller-manager" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.756694 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="107b2d55-7d06-4091-b57c-bcf7c3635060" containerName="controller-manager" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.756831 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb9789af-7be4-40cf-a9da-df45fa8522f7" containerName="route-controller-manager" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.756849 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="107b2d55-7d06-4091-b57c-bcf7c3635060" containerName="controller-manager" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.757444 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.759615 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.759743 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.763224 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.763224 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.763431 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.763534 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.764317 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5"] Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.766336 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.769952 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6879dd76fd-fjgmq"] Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.770281 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.770653 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.770997 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.771965 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.772413 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.772471 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.772724 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.775744 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5"] Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.889324 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/46cc0406-d437-431f-90e4-b04a4156d39d-config\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.889381 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46cc0406-d437-431f-90e4-b04a4156d39d-serving-cert\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.889408 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46cc0406-d437-431f-90e4-b04a4156d39d-client-ca\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.889465 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/116c1330-2d97-47a6-bf5b-b528c978f355-config\") pod \"route-controller-manager-69546f855f-cdcv5\" (UID: \"116c1330-2d97-47a6-bf5b-b528c978f355\") " pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.889490 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vrql\" (UniqueName: \"kubernetes.io/projected/46cc0406-d437-431f-90e4-b04a4156d39d-kube-api-access-7vrql\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.889525 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/116c1330-2d97-47a6-bf5b-b528c978f355-serving-cert\") pod \"route-controller-manager-69546f855f-cdcv5\" (UID: \"116c1330-2d97-47a6-bf5b-b528c978f355\") " pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.889559 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46cc0406-d437-431f-90e4-b04a4156d39d-proxy-ca-bundles\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.889576 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/116c1330-2d97-47a6-bf5b-b528c978f355-client-ca\") pod \"route-controller-manager-69546f855f-cdcv5\" (UID: \"116c1330-2d97-47a6-bf5b-b528c978f355\") " pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.889594 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8w95\" (UniqueName: 
\"kubernetes.io/projected/116c1330-2d97-47a6-bf5b-b528c978f355-kube-api-access-h8w95\") pod \"route-controller-manager-69546f855f-cdcv5\" (UID: \"116c1330-2d97-47a6-bf5b-b528c978f355\") " pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.990570 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46cc0406-d437-431f-90e4-b04a4156d39d-config\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.990623 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46cc0406-d437-431f-90e4-b04a4156d39d-serving-cert\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.990645 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46cc0406-d437-431f-90e4-b04a4156d39d-client-ca\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.990665 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/116c1330-2d97-47a6-bf5b-b528c978f355-config\") pod \"route-controller-manager-69546f855f-cdcv5\" (UID: \"116c1330-2d97-47a6-bf5b-b528c978f355\") " pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.990734 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vrql\" (UniqueName: \"kubernetes.io/projected/46cc0406-d437-431f-90e4-b04a4156d39d-kube-api-access-7vrql\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.990773 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/116c1330-2d97-47a6-bf5b-b528c978f355-serving-cert\") pod \"route-controller-manager-69546f855f-cdcv5\" (UID: \"116c1330-2d97-47a6-bf5b-b528c978f355\") " pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.990792 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46cc0406-d437-431f-90e4-b04a4156d39d-proxy-ca-bundles\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.990808 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/116c1330-2d97-47a6-bf5b-b528c978f355-client-ca\") pod \"route-controller-manager-69546f855f-cdcv5\" (UID: 
\"116c1330-2d97-47a6-bf5b-b528c978f355\") " pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.990824 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8w95\" (UniqueName: \"kubernetes.io/projected/116c1330-2d97-47a6-bf5b-b528c978f355-kube-api-access-h8w95\") pod \"route-controller-manager-69546f855f-cdcv5\" (UID: \"116c1330-2d97-47a6-bf5b-b528c978f355\") " pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.992352 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46cc0406-d437-431f-90e4-b04a4156d39d-client-ca\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.992631 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46cc0406-d437-431f-90e4-b04a4156d39d-config\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.993028 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/46cc0406-d437-431f-90e4-b04a4156d39d-proxy-ca-bundles\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.993874 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/116c1330-2d97-47a6-bf5b-b528c978f355-config\") pod \"route-controller-manager-69546f855f-cdcv5\" (UID: \"116c1330-2d97-47a6-bf5b-b528c978f355\") " pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:40 crc kubenswrapper[4592]: I0929 17:03:40.994885 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/116c1330-2d97-47a6-bf5b-b528c978f355-client-ca\") pod \"route-controller-manager-69546f855f-cdcv5\" (UID: \"116c1330-2d97-47a6-bf5b-b528c978f355\") " pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:41 crc kubenswrapper[4592]: I0929 17:03:40.999787 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46cc0406-d437-431f-90e4-b04a4156d39d-serving-cert\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:41 crc kubenswrapper[4592]: I0929 17:03:41.001819 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/116c1330-2d97-47a6-bf5b-b528c978f355-serving-cert\") pod \"route-controller-manager-69546f855f-cdcv5\" (UID: \"116c1330-2d97-47a6-bf5b-b528c978f355\") " pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:41 crc kubenswrapper[4592]: I0929 17:03:41.009825 4592 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8w95\" (UniqueName: \"kubernetes.io/projected/116c1330-2d97-47a6-bf5b-b528c978f355-kube-api-access-h8w95\") pod \"route-controller-manager-69546f855f-cdcv5\" (UID: \"116c1330-2d97-47a6-bf5b-b528c978f355\") " pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:41 crc kubenswrapper[4592]: I0929 17:03:41.016015 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vrql\" (UniqueName: \"kubernetes.io/projected/46cc0406-d437-431f-90e4-b04a4156d39d-kube-api-access-7vrql\") pod \"controller-manager-6879dd76fd-fjgmq\" (UID: \"46cc0406-d437-431f-90e4-b04a4156d39d\") " pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:41 crc kubenswrapper[4592]: I0929 17:03:41.084422 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" Sep 29 17:03:41 crc kubenswrapper[4592]: I0929 17:03:41.094082 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" Sep 29 17:03:41 crc kubenswrapper[4592]: I0929 17:03:41.198633 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="107b2d55-7d06-4091-b57c-bcf7c3635060" path="/var/lib/kubelet/pods/107b2d55-7d06-4091-b57c-bcf7c3635060/volumes" Sep 29 17:03:41 crc kubenswrapper[4592]: I0929 17:03:41.199377 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb9789af-7be4-40cf-a9da-df45fa8522f7" path="/var/lib/kubelet/pods/bb9789af-7be4-40cf-a9da-df45fa8522f7/volumes" Sep 29 17:03:41 crc kubenswrapper[4592]: I0929 17:03:41.305208 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6879dd76fd-fjgmq"] Sep 29 17:03:41 crc kubenswrapper[4592]: W0929 17:03:41.313583 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46cc0406_d437_431f_90e4_b04a4156d39d.slice/crio-39c063b1e179bb541ef2753b971ce166f9285ade87e012f8915059256d7d616f WatchSource:0}: Error finding container 39c063b1e179bb541ef2753b971ce166f9285ade87e012f8915059256d7d616f: Status 404 returned error can't find the container with id 39c063b1e179bb541ef2753b971ce166f9285ade87e012f8915059256d7d616f Sep 29 17:03:41 crc kubenswrapper[4592]: I0929 17:03:41.357051 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5"] Sep 29 17:03:42 crc kubenswrapper[4592]: I0929 17:03:42.047653 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" event={"ID":"46cc0406-d437-431f-90e4-b04a4156d39d","Type":"ContainerStarted","Data":"b7a223ffeff86de2f93726e78564e4b68a22e1f110515e29f5de29bcf2c2a4d7"} Sep 29 17:03:42 crc kubenswrapper[4592]: I0929 17:03:42.047992 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" event={"ID":"46cc0406-d437-431f-90e4-b04a4156d39d","Type":"ContainerStarted","Data":"39c063b1e179bb541ef2753b971ce166f9285ade87e012f8915059256d7d616f"} Sep 29 17:03:42 crc kubenswrapper[4592]: I0929 17:03:42.048018 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" 
Sep 29 17:03:42 crc kubenswrapper[4592]: I0929 17:03:42.050699 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" event={"ID":"116c1330-2d97-47a6-bf5b-b528c978f355","Type":"ContainerStarted","Data":"7d5837b686585f216ab1191d4a00c0d6b22564e295372f487480762ace4de35b"}
Sep 29 17:03:42 crc kubenswrapper[4592]: I0929 17:03:42.050745 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" event={"ID":"116c1330-2d97-47a6-bf5b-b528c978f355","Type":"ContainerStarted","Data":"8734d45f4621d2a257a20ab9b89bc3a214631c36ff8226d9ffe51d4c4a7db40b"}
Sep 29 17:03:42 crc kubenswrapper[4592]: I0929 17:03:42.051882 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5"
Sep 29 17:03:42 crc kubenswrapper[4592]: I0929 17:03:42.058288 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq"
Sep 29 17:03:42 crc kubenswrapper[4592]: I0929 17:03:42.060173 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5"
Sep 29 17:03:42 crc kubenswrapper[4592]: I0929 17:03:42.090458 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-69546f855f-cdcv5" podStartSLOduration=3.090441452 podStartE2EDuration="3.090441452s" podCreationTimestamp="2025-09-29 17:03:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:03:42.087598251 +0000 UTC m=+752.235375952" watchObservedRunningTime="2025-09-29 17:03:42.090441452 +0000 UTC m=+752.238219133"
Sep 29 17:03:42 crc kubenswrapper[4592]: I0929 17:03:42.091120 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6879dd76fd-fjgmq" podStartSLOduration=3.091114852 podStartE2EDuration="3.091114852s" podCreationTimestamp="2025-09-29 17:03:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:03:42.073559777 +0000 UTC m=+752.221337478" watchObservedRunningTime="2025-09-29 17:03:42.091114852 +0000 UTC m=+752.238892533"
Sep 29 17:03:45 crc kubenswrapper[4592]: I0929 17:03:45.180816 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6d689559c5-cfxmq"
Sep 29 17:03:49 crc kubenswrapper[4592]: I0929 17:03:49.640385 4592 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Sep 29 17:03:57 crc kubenswrapper[4592]: I0929 17:03:57.792299 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"]
Sep 29 17:03:57 crc kubenswrapper[4592]: I0929 17:03:57.793839 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:03:57 crc kubenswrapper[4592]: I0929 17:03:57.805544 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Sep 29 17:03:57 crc kubenswrapper[4592]: I0929 17:03:57.807101 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"]
Sep 29 17:03:57 crc kubenswrapper[4592]: I0929 17:03:57.911943 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f3c7e075-9f35-4418-b416-d5839c9d6b88-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9\" (UID: \"f3c7e075-9f35-4418-b416-d5839c9d6b88\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:03:57 crc kubenswrapper[4592]: I0929 17:03:57.912012 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js87b\" (UniqueName: \"kubernetes.io/projected/f3c7e075-9f35-4418-b416-d5839c9d6b88-kube-api-access-js87b\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9\" (UID: \"f3c7e075-9f35-4418-b416-d5839c9d6b88\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:03:57 crc kubenswrapper[4592]: I0929 17:03:57.912049 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f3c7e075-9f35-4418-b416-d5839c9d6b88-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9\" (UID: \"f3c7e075-9f35-4418-b416-d5839c9d6b88\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:03:58 crc kubenswrapper[4592]: I0929 17:03:58.013646 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f3c7e075-9f35-4418-b416-d5839c9d6b88-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9\" (UID: \"f3c7e075-9f35-4418-b416-d5839c9d6b88\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:03:58 crc kubenswrapper[4592]: I0929 17:03:58.013729 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f3c7e075-9f35-4418-b416-d5839c9d6b88-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9\" (UID: \"f3c7e075-9f35-4418-b416-d5839c9d6b88\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:03:58 crc kubenswrapper[4592]: I0929 17:03:58.013765 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js87b\" (UniqueName: \"kubernetes.io/projected/f3c7e075-9f35-4418-b416-d5839c9d6b88-kube-api-access-js87b\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9\" (UID: \"f3c7e075-9f35-4418-b416-d5839c9d6b88\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:03:58 crc kubenswrapper[4592]: I0929 17:03:58.014347 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f3c7e075-9f35-4418-b416-d5839c9d6b88-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9\" (UID: \"f3c7e075-9f35-4418-b416-d5839c9d6b88\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:03:58 crc kubenswrapper[4592]: I0929 17:03:58.014397 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f3c7e075-9f35-4418-b416-d5839c9d6b88-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9\" (UID: \"f3c7e075-9f35-4418-b416-d5839c9d6b88\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:03:58 crc kubenswrapper[4592]: I0929 17:03:58.035234 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js87b\" (UniqueName: \"kubernetes.io/projected/f3c7e075-9f35-4418-b416-d5839c9d6b88-kube-api-access-js87b\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9\" (UID: \"f3c7e075-9f35-4418-b416-d5839c9d6b88\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:03:58 crc kubenswrapper[4592]: I0929 17:03:58.111241 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:03:58 crc kubenswrapper[4592]: I0929 17:03:58.489965 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"]
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.143720 4592 generic.go:334] "Generic (PLEG): container finished" podID="f3c7e075-9f35-4418-b416-d5839c9d6b88" containerID="d87c99bab18c4237f6dc12457a95640c06a16cc4ff3cd2a305f3d210398d81dc" exitCode=0
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.143855 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9" event={"ID":"f3c7e075-9f35-4418-b416-d5839c9d6b88","Type":"ContainerDied","Data":"d87c99bab18c4237f6dc12457a95640c06a16cc4ff3cd2a305f3d210398d81dc"}
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.144497 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9" event={"ID":"f3c7e075-9f35-4418-b416-d5839c9d6b88","Type":"ContainerStarted","Data":"e24b2797210cce3ec4ec3fb3d98f931772c443917f6df29f13c4ccae0dffd83d"}
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.523336 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hgt66"]
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.524517 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.532545 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hgt66"]
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.640769 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e92acd36-14cf-4a40-a9ab-e8c6494cadad-utilities\") pod \"redhat-operators-hgt66\" (UID: \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\") " pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.640811 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e92acd36-14cf-4a40-a9ab-e8c6494cadad-catalog-content\") pod \"redhat-operators-hgt66\" (UID: \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\") " pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.640896 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8skw\" (UniqueName: \"kubernetes.io/projected/e92acd36-14cf-4a40-a9ab-e8c6494cadad-kube-api-access-l8skw\") pod \"redhat-operators-hgt66\" (UID: \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\") " pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.742216 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e92acd36-14cf-4a40-a9ab-e8c6494cadad-utilities\") pod \"redhat-operators-hgt66\" (UID: \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\") " pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.742585 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e92acd36-14cf-4a40-a9ab-e8c6494cadad-catalog-content\") pod \"redhat-operators-hgt66\" (UID: \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\") " pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.742628 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8skw\" (UniqueName: \"kubernetes.io/projected/e92acd36-14cf-4a40-a9ab-e8c6494cadad-kube-api-access-l8skw\") pod \"redhat-operators-hgt66\" (UID: \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\") " pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.742742 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e92acd36-14cf-4a40-a9ab-e8c6494cadad-utilities\") pod \"redhat-operators-hgt66\" (UID: \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\") " pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.743015 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e92acd36-14cf-4a40-a9ab-e8c6494cadad-catalog-content\") pod \"redhat-operators-hgt66\" (UID: \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\") " pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.769204 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8skw\" (UniqueName: \"kubernetes.io/projected/e92acd36-14cf-4a40-a9ab-e8c6494cadad-kube-api-access-l8skw\") pod \"redhat-operators-hgt66\" (UID: \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\") " pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:03:59 crc kubenswrapper[4592]: I0929 17:03:59.840521 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:04:00 crc kubenswrapper[4592]: I0929 17:04:00.250728 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hgt66"]
Sep 29 17:04:00 crc kubenswrapper[4592]: W0929 17:04:00.260874 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode92acd36_14cf_4a40_a9ab_e8c6494cadad.slice/crio-f4e6f27b2ec03743457c2307b00e7699cdbbf316396558d0e8cf7727beb2a2df WatchSource:0}: Error finding container f4e6f27b2ec03743457c2307b00e7699cdbbf316396558d0e8cf7727beb2a2df: Status 404 returned error can't find the container with id f4e6f27b2ec03743457c2307b00e7699cdbbf316396558d0e8cf7727beb2a2df
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.136840 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-zn6hr" podUID="e586a2e2-918f-40e6-b7eb-9e937dd20c32" containerName="console" containerID="cri-o://73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc" gracePeriod=15
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.157339 4592 generic.go:334] "Generic (PLEG): container finished" podID="e92acd36-14cf-4a40-a9ab-e8c6494cadad" containerID="1d75f2abb73f5c665f7a8fcc6c955f9595c8912f47a5ef2165258e52dffdf728" exitCode=0
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.157385 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgt66" event={"ID":"e92acd36-14cf-4a40-a9ab-e8c6494cadad","Type":"ContainerDied","Data":"1d75f2abb73f5c665f7a8fcc6c955f9595c8912f47a5ef2165258e52dffdf728"}
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.157412 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgt66" event={"ID":"e92acd36-14cf-4a40-a9ab-e8c6494cadad","Type":"ContainerStarted","Data":"f4e6f27b2ec03743457c2307b00e7699cdbbf316396558d0e8cf7727beb2a2df"}
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.574017 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-zn6hr_e586a2e2-918f-40e6-b7eb-9e937dd20c32/console/0.log"
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.574318 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-zn6hr"
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.666244 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-oauth-config\") pod \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") "
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.666319 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-oauth-serving-cert\") pod \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") "
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.666335 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-service-ca\") pod \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") "
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.666396 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glxzk\" (UniqueName: \"kubernetes.io/projected/e586a2e2-918f-40e6-b7eb-9e937dd20c32-kube-api-access-glxzk\") pod \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") "
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.666417 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-trusted-ca-bundle\") pod \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") "
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.666464 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-serving-cert\") pod \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") "
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.666481 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-config\") pod \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\" (UID: \"e586a2e2-918f-40e6-b7eb-9e937dd20c32\") "
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.667488 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "e586a2e2-918f-40e6-b7eb-9e937dd20c32" (UID: "e586a2e2-918f-40e6-b7eb-9e937dd20c32"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.667531 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-service-ca" (OuterVolumeSpecName: "service-ca") pod "e586a2e2-918f-40e6-b7eb-9e937dd20c32" (UID: "e586a2e2-918f-40e6-b7eb-9e937dd20c32"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.667561 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-config" (OuterVolumeSpecName: "console-config") pod "e586a2e2-918f-40e6-b7eb-9e937dd20c32" (UID: "e586a2e2-918f-40e6-b7eb-9e937dd20c32"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.667601 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "e586a2e2-918f-40e6-b7eb-9e937dd20c32" (UID: "e586a2e2-918f-40e6-b7eb-9e937dd20c32"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.675518 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e586a2e2-918f-40e6-b7eb-9e937dd20c32-kube-api-access-glxzk" (OuterVolumeSpecName: "kube-api-access-glxzk") pod "e586a2e2-918f-40e6-b7eb-9e937dd20c32" (UID: "e586a2e2-918f-40e6-b7eb-9e937dd20c32"). InnerVolumeSpecName "kube-api-access-glxzk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.675613 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "e586a2e2-918f-40e6-b7eb-9e937dd20c32" (UID: "e586a2e2-918f-40e6-b7eb-9e937dd20c32"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.681508 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "e586a2e2-918f-40e6-b7eb-9e937dd20c32" (UID: "e586a2e2-918f-40e6-b7eb-9e937dd20c32"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.767662 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glxzk\" (UniqueName: \"kubernetes.io/projected/e586a2e2-918f-40e6-b7eb-9e937dd20c32-kube-api-access-glxzk\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.767706 4592 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.767716 4592 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.767725 4592 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-config\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.767734 4592 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/e586a2e2-918f-40e6-b7eb-9e937dd20c32-console-oauth-config\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.767741 4592 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:01 crc kubenswrapper[4592]: I0929 17:04:01.767751 4592 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e586a2e2-918f-40e6-b7eb-9e937dd20c32-service-ca\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.165886 4592 generic.go:334] "Generic (PLEG): container finished" podID="f3c7e075-9f35-4418-b416-d5839c9d6b88" containerID="4b17f8a840b7c3b15d6d3c53bdda376ae313e2de8b50bcac503f287bb3206643" exitCode=0
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.166216 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9" event={"ID":"f3c7e075-9f35-4418-b416-d5839c9d6b88","Type":"ContainerDied","Data":"4b17f8a840b7c3b15d6d3c53bdda376ae313e2de8b50bcac503f287bb3206643"}
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.170428 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-zn6hr_e586a2e2-918f-40e6-b7eb-9e937dd20c32/console/0.log"
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.170535 4592 generic.go:334] "Generic (PLEG): container finished" podID="e586a2e2-918f-40e6-b7eb-9e937dd20c32" containerID="73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc" exitCode=2
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.170632 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zn6hr" event={"ID":"e586a2e2-918f-40e6-b7eb-9e937dd20c32","Type":"ContainerDied","Data":"73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc"}
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.170701 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zn6hr" event={"ID":"e586a2e2-918f-40e6-b7eb-9e937dd20c32","Type":"ContainerDied","Data":"310d361df25db221565ce0c13514fecb91633ca5b6a199b81390e80ef0e487e2"}
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.170763 4592 scope.go:117] "RemoveContainer" containerID="73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc"
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.170937 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-zn6hr"
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.180537 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgt66" event={"ID":"e92acd36-14cf-4a40-a9ab-e8c6494cadad","Type":"ContainerStarted","Data":"b8ae8fb824385de454be4a85baa10b336726dec6466d6da51ba6632a0e0fa104"}
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.190832 4592 scope.go:117] "RemoveContainer" containerID="73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc"
Sep 29 17:04:02 crc kubenswrapper[4592]: E0929 17:04:02.191439 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc\": container with ID starting with 73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc not found: ID does not exist" containerID="73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc"
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.191546 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc"} err="failed to get container status \"73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc\": rpc error: code = NotFound desc = could not find container \"73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc\": container with ID starting with 73799d881814b781ed451e2c2a0a9c81d45f88071a53163810f35cc441ea81cc not found: ID does not exist"
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.262774 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-zn6hr"]
Sep 29 17:04:02 crc kubenswrapper[4592]: I0929 17:04:02.266676 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-zn6hr"]
Sep 29 17:04:03 crc kubenswrapper[4592]: I0929 17:04:03.192299 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e586a2e2-918f-40e6-b7eb-9e937dd20c32" path="/var/lib/kubelet/pods/e586a2e2-918f-40e6-b7eb-9e937dd20c32/volumes"
Sep 29 17:04:03 crc kubenswrapper[4592]: I0929 17:04:03.194099 4592 generic.go:334] "Generic (PLEG): container finished" podID="f3c7e075-9f35-4418-b416-d5839c9d6b88" containerID="47590c2ed8ba075b2ccfcd044d8543a90391ebe422296b4359ee3d7552ab9f6a" exitCode=0
Sep 29 17:04:03 crc kubenswrapper[4592]: I0929 17:04:03.194283 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9" event={"ID":"f3c7e075-9f35-4418-b416-d5839c9d6b88","Type":"ContainerDied","Data":"47590c2ed8ba075b2ccfcd044d8543a90391ebe422296b4359ee3d7552ab9f6a"}
Sep 29 17:04:03 crc kubenswrapper[4592]: I0929 17:04:03.197946 4592 generic.go:334] "Generic (PLEG): container finished" podID="e92acd36-14cf-4a40-a9ab-e8c6494cadad" containerID="b8ae8fb824385de454be4a85baa10b336726dec6466d6da51ba6632a0e0fa104" exitCode=0
Sep 29 17:04:03 crc kubenswrapper[4592]: I0929 17:04:03.198022 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgt66" event={"ID":"e92acd36-14cf-4a40-a9ab-e8c6494cadad","Type":"ContainerDied","Data":"b8ae8fb824385de454be4a85baa10b336726dec6466d6da51ba6632a0e0fa104"}
Sep 29 17:04:04 crc kubenswrapper[4592]: I0929 17:04:04.575578 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:04:04 crc kubenswrapper[4592]: I0929 17:04:04.713424 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f3c7e075-9f35-4418-b416-d5839c9d6b88-bundle\") pod \"f3c7e075-9f35-4418-b416-d5839c9d6b88\" (UID: \"f3c7e075-9f35-4418-b416-d5839c9d6b88\") "
Sep 29 17:04:04 crc kubenswrapper[4592]: I0929 17:04:04.713530 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f3c7e075-9f35-4418-b416-d5839c9d6b88-util\") pod \"f3c7e075-9f35-4418-b416-d5839c9d6b88\" (UID: \"f3c7e075-9f35-4418-b416-d5839c9d6b88\") "
Sep 29 17:04:04 crc kubenswrapper[4592]: I0929 17:04:04.713565 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js87b\" (UniqueName: \"kubernetes.io/projected/f3c7e075-9f35-4418-b416-d5839c9d6b88-kube-api-access-js87b\") pod \"f3c7e075-9f35-4418-b416-d5839c9d6b88\" (UID: \"f3c7e075-9f35-4418-b416-d5839c9d6b88\") "
Sep 29 17:04:04 crc kubenswrapper[4592]: I0929 17:04:04.714638 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3c7e075-9f35-4418-b416-d5839c9d6b88-bundle" (OuterVolumeSpecName: "bundle") pod "f3c7e075-9f35-4418-b416-d5839c9d6b88" (UID: "f3c7e075-9f35-4418-b416-d5839c9d6b88"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:04:04 crc kubenswrapper[4592]: I0929 17:04:04.721290 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3c7e075-9f35-4418-b416-d5839c9d6b88-kube-api-access-js87b" (OuterVolumeSpecName: "kube-api-access-js87b") pod "f3c7e075-9f35-4418-b416-d5839c9d6b88" (UID: "f3c7e075-9f35-4418-b416-d5839c9d6b88"). InnerVolumeSpecName "kube-api-access-js87b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:04:04 crc kubenswrapper[4592]: I0929 17:04:04.724335 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3c7e075-9f35-4418-b416-d5839c9d6b88-util" (OuterVolumeSpecName: "util") pod "f3c7e075-9f35-4418-b416-d5839c9d6b88" (UID: "f3c7e075-9f35-4418-b416-d5839c9d6b88"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:04:04 crc kubenswrapper[4592]: I0929 17:04:04.814849 4592 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f3c7e075-9f35-4418-b416-d5839c9d6b88-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:04 crc kubenswrapper[4592]: I0929 17:04:04.814888 4592 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f3c7e075-9f35-4418-b416-d5839c9d6b88-util\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:04 crc kubenswrapper[4592]: I0929 17:04:04.814896 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js87b\" (UniqueName: \"kubernetes.io/projected/f3c7e075-9f35-4418-b416-d5839c9d6b88-kube-api-access-js87b\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:05 crc kubenswrapper[4592]: I0929 17:04:05.213066 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgt66" event={"ID":"e92acd36-14cf-4a40-a9ab-e8c6494cadad","Type":"ContainerStarted","Data":"8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36"}
Sep 29 17:04:05 crc kubenswrapper[4592]: I0929 17:04:05.215179 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9"
Sep 29 17:04:05 crc kubenswrapper[4592]: I0929 17:04:05.215189 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9" event={"ID":"f3c7e075-9f35-4418-b416-d5839c9d6b88","Type":"ContainerDied","Data":"e24b2797210cce3ec4ec3fb3d98f931772c443917f6df29f13c4ccae0dffd83d"}
Sep 29 17:04:05 crc kubenswrapper[4592]: I0929 17:04:05.215227 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e24b2797210cce3ec4ec3fb3d98f931772c443917f6df29f13c4ccae0dffd83d"
Sep 29 17:04:05 crc kubenswrapper[4592]: I0929 17:04:05.229778 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hgt66" podStartSLOduration=3.327886046 podStartE2EDuration="6.229760653s" podCreationTimestamp="2025-09-29 17:03:59 +0000 UTC" firstStartedPulling="2025-09-29 17:04:01.158917443 +0000 UTC m=+771.306695124" lastFinishedPulling="2025-09-29 17:04:04.06079205 +0000 UTC m=+774.208569731" observedRunningTime="2025-09-29 17:04:05.229566378 +0000 UTC m=+775.377344059" watchObservedRunningTime="2025-09-29 17:04:05.229760653 +0000 UTC m=+775.377538344"
Sep 29 17:04:09 crc kubenswrapper[4592]: I0929 17:04:09.841574 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:04:09 crc kubenswrapper[4592]: I0929 17:04:09.841989 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:04:09 crc kubenswrapper[4592]: I0929 17:04:09.905344 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:04:10 crc kubenswrapper[4592]: I0929 17:04:10.286907 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:04:11 crc kubenswrapper[4592]: I0929 17:04:11.908321 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hgt66"]
Sep 29 17:04:12 crc kubenswrapper[4592]: I0929 17:04:12.255174 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hgt66" podUID="e92acd36-14cf-4a40-a9ab-e8c6494cadad" containerName="registry-server" containerID="cri-o://8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36" gracePeriod=2
Sep 29 17:04:12 crc kubenswrapper[4592]: I0929 17:04:12.732812 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:04:12 crc kubenswrapper[4592]: I0929 17:04:12.817953 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e92acd36-14cf-4a40-a9ab-e8c6494cadad-utilities\") pod \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\" (UID: \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\") "
Sep 29 17:04:12 crc kubenswrapper[4592]: I0929 17:04:12.818014 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e92acd36-14cf-4a40-a9ab-e8c6494cadad-catalog-content\") pod \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\" (UID: \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\") "
Sep 29 17:04:12 crc kubenswrapper[4592]: I0929 17:04:12.818096 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8skw\" (UniqueName: \"kubernetes.io/projected/e92acd36-14cf-4a40-a9ab-e8c6494cadad-kube-api-access-l8skw\") pod \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\" (UID: \"e92acd36-14cf-4a40-a9ab-e8c6494cadad\") "
Sep 29 17:04:12 crc kubenswrapper[4592]: I0929 17:04:12.819167 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e92acd36-14cf-4a40-a9ab-e8c6494cadad-utilities" (OuterVolumeSpecName: "utilities") pod "e92acd36-14cf-4a40-a9ab-e8c6494cadad" (UID: "e92acd36-14cf-4a40-a9ab-e8c6494cadad"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:04:12 crc kubenswrapper[4592]: I0929 17:04:12.823479 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e92acd36-14cf-4a40-a9ab-e8c6494cadad-kube-api-access-l8skw" (OuterVolumeSpecName: "kube-api-access-l8skw") pod "e92acd36-14cf-4a40-a9ab-e8c6494cadad" (UID: "e92acd36-14cf-4a40-a9ab-e8c6494cadad"). InnerVolumeSpecName "kube-api-access-l8skw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:04:12 crc kubenswrapper[4592]: I0929 17:04:12.898502 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e92acd36-14cf-4a40-a9ab-e8c6494cadad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e92acd36-14cf-4a40-a9ab-e8c6494cadad" (UID: "e92acd36-14cf-4a40-a9ab-e8c6494cadad"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:04:12 crc kubenswrapper[4592]: I0929 17:04:12.920776 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e92acd36-14cf-4a40-a9ab-e8c6494cadad-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:12 crc kubenswrapper[4592]: I0929 17:04:12.920808 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e92acd36-14cf-4a40-a9ab-e8c6494cadad-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:12 crc kubenswrapper[4592]: I0929 17:04:12.920820 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8skw\" (UniqueName: \"kubernetes.io/projected/e92acd36-14cf-4a40-a9ab-e8c6494cadad-kube-api-access-l8skw\") on node \"crc\" DevicePath \"\""
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.262815 4592 generic.go:334] "Generic (PLEG): container finished" podID="e92acd36-14cf-4a40-a9ab-e8c6494cadad" containerID="8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36" exitCode=0
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.262859 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgt66" event={"ID":"e92acd36-14cf-4a40-a9ab-e8c6494cadad","Type":"ContainerDied","Data":"8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36"}
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.262882 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hgt66"
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.262903 4592 scope.go:117] "RemoveContainer" containerID="8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36"
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.262887 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgt66" event={"ID":"e92acd36-14cf-4a40-a9ab-e8c6494cadad","Type":"ContainerDied","Data":"f4e6f27b2ec03743457c2307b00e7699cdbbf316396558d0e8cf7727beb2a2df"}
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.286484 4592 scope.go:117] "RemoveContainer" containerID="b8ae8fb824385de454be4a85baa10b336726dec6466d6da51ba6632a0e0fa104"
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.289365 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hgt66"]
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.294279 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hgt66"]
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.314137 4592 scope.go:117] "RemoveContainer" containerID="1d75f2abb73f5c665f7a8fcc6c955f9595c8912f47a5ef2165258e52dffdf728"
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.336775 4592 scope.go:117] "RemoveContainer" containerID="8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36"
Sep 29 17:04:13 crc kubenswrapper[4592]: E0929 17:04:13.337354 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36\": container with ID starting with 8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36 not found: ID does not exist" containerID="8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36"
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.337401 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36"} err="failed to get container status \"8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36\": rpc error: code = NotFound desc = could not find container \"8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36\": container with ID starting with 8a2afb2ce22a244debcb37894bc882644a55c2374b8c3e50dc416336bebd1d36 not found: ID does not exist"
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.337427 4592 scope.go:117] "RemoveContainer" containerID="b8ae8fb824385de454be4a85baa10b336726dec6466d6da51ba6632a0e0fa104"
Sep 29 17:04:13 crc kubenswrapper[4592]: E0929 17:04:13.337865 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8ae8fb824385de454be4a85baa10b336726dec6466d6da51ba6632a0e0fa104\": container with ID starting with b8ae8fb824385de454be4a85baa10b336726dec6466d6da51ba6632a0e0fa104 not found: ID does not exist" containerID="b8ae8fb824385de454be4a85baa10b336726dec6466d6da51ba6632a0e0fa104"
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.337917 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8ae8fb824385de454be4a85baa10b336726dec6466d6da51ba6632a0e0fa104"} err="failed to get container status \"b8ae8fb824385de454be4a85baa10b336726dec6466d6da51ba6632a0e0fa104\": rpc error: code = NotFound desc = could not find container \"b8ae8fb824385de454be4a85baa10b336726dec6466d6da51ba6632a0e0fa104\": container with ID starting with b8ae8fb824385de454be4a85baa10b336726dec6466d6da51ba6632a0e0fa104 not found: ID does not exist"
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.337950 4592 scope.go:117] "RemoveContainer" containerID="1d75f2abb73f5c665f7a8fcc6c955f9595c8912f47a5ef2165258e52dffdf728"
Sep 29 17:04:13 crc kubenswrapper[4592]: E0929 17:04:13.338202 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d75f2abb73f5c665f7a8fcc6c955f9595c8912f47a5ef2165258e52dffdf728\": container with ID starting with 1d75f2abb73f5c665f7a8fcc6c955f9595c8912f47a5ef2165258e52dffdf728 not found: ID does not exist" containerID="1d75f2abb73f5c665f7a8fcc6c955f9595c8912f47a5ef2165258e52dffdf728"
Sep 29 17:04:13 crc kubenswrapper[4592]: I0929 17:04:13.338226 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d75f2abb73f5c665f7a8fcc6c955f9595c8912f47a5ef2165258e52dffdf728"} err="failed to get container status \"1d75f2abb73f5c665f7a8fcc6c955f9595c8912f47a5ef2165258e52dffdf728\": rpc error: code = NotFound desc = could not find container \"1d75f2abb73f5c665f7a8fcc6c955f9595c8912f47a5ef2165258e52dffdf728\": container with ID starting with 1d75f2abb73f5c665f7a8fcc6c955f9595c8912f47a5ef2165258e52dffdf728 not found: ID does not exist"
Sep 29 17:04:15 crc kubenswrapper[4592]: I0929 17:04:15.189523 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e92acd36-14cf-4a40-a9ab-e8c6494cadad" path="/var/lib/kubelet/pods/e92acd36-14cf-4a40-a9ab-e8c6494cadad/volumes"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.112571 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf"]
Sep 29 17:04:16 crc kubenswrapper[4592]: E0929 17:04:16.112766 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e92acd36-14cf-4a40-a9ab-e8c6494cadad" containerName="extract-content"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.112784 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e92acd36-14cf-4a40-a9ab-e8c6494cadad" containerName="extract-content"
Sep 29 17:04:16 crc kubenswrapper[4592]: E0929 17:04:16.112803 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3c7e075-9f35-4418-b416-d5839c9d6b88" containerName="pull"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.112810 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3c7e075-9f35-4418-b416-d5839c9d6b88" containerName="pull"
Sep 29 17:04:16 crc kubenswrapper[4592]: E0929 17:04:16.112818 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3c7e075-9f35-4418-b416-d5839c9d6b88" containerName="extract"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.112847 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3c7e075-9f35-4418-b416-d5839c9d6b88" containerName="extract"
Sep 29 17:04:16 crc kubenswrapper[4592]: E0929 17:04:16.112859 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e92acd36-14cf-4a40-a9ab-e8c6494cadad" containerName="extract-utilities"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.112865 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e92acd36-14cf-4a40-a9ab-e8c6494cadad" containerName="extract-utilities"
Sep 29 17:04:16 crc kubenswrapper[4592]: E0929 17:04:16.112875 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3c7e075-9f35-4418-b416-d5839c9d6b88" containerName="util"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.112882 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3c7e075-9f35-4418-b416-d5839c9d6b88" containerName="util"
Sep 29 17:04:16 crc kubenswrapper[4592]: E0929 17:04:16.112893 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e586a2e2-918f-40e6-b7eb-9e937dd20c32" containerName="console"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.112900 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e586a2e2-918f-40e6-b7eb-9e937dd20c32" containerName="console"
Sep 29 17:04:16 crc kubenswrapper[4592]: E0929 17:04:16.112913 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e92acd36-14cf-4a40-a9ab-e8c6494cadad" containerName="registry-server"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.112920 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e92acd36-14cf-4a40-a9ab-e8c6494cadad" containerName="registry-server"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.113022 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="e92acd36-14cf-4a40-a9ab-e8c6494cadad" containerName="registry-server"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.113033 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3c7e075-9f35-4418-b416-d5839c9d6b88" containerName="extract"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.113043 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="e586a2e2-918f-40e6-b7eb-9e937dd20c32" containerName="console"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.113476 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.117709 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.117802 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-7gvwz"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.117942 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.117958 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.118080 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.174494 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf"]
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.262766 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/764035e6-f447-4e19-a17f-c334e6270ba6-apiservice-cert\") pod \"metallb-operator-controller-manager-5b64bfcc84-9fvrf\" (UID: \"764035e6-f447-4e19-a17f-c334e6270ba6\") " pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.263040 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/764035e6-f447-4e19-a17f-c334e6270ba6-webhook-cert\") pod \"metallb-operator-controller-manager-5b64bfcc84-9fvrf\" (UID: \"764035e6-f447-4e19-a17f-c334e6270ba6\") " pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.263088 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxp44\" (UniqueName: \"kubernetes.io/projected/764035e6-f447-4e19-a17f-c334e6270ba6-kube-api-access-qxp44\") pod \"metallb-operator-controller-manager-5b64bfcc84-9fvrf\" (UID: \"764035e6-f447-4e19-a17f-c334e6270ba6\") " pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf"
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.367651 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb"]
Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.368317 4592 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.369738 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/764035e6-f447-4e19-a17f-c334e6270ba6-apiservice-cert\") pod \"metallb-operator-controller-manager-5b64bfcc84-9fvrf\" (UID: \"764035e6-f447-4e19-a17f-c334e6270ba6\") " pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.369810 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/764035e6-f447-4e19-a17f-c334e6270ba6-webhook-cert\") pod \"metallb-operator-controller-manager-5b64bfcc84-9fvrf\" (UID: \"764035e6-f447-4e19-a17f-c334e6270ba6\") " pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.369829 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxp44\" (UniqueName: \"kubernetes.io/projected/764035e6-f447-4e19-a17f-c334e6270ba6-kube-api-access-qxp44\") pod \"metallb-operator-controller-manager-5b64bfcc84-9fvrf\" (UID: \"764035e6-f447-4e19-a17f-c334e6270ba6\") " pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.374897 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.374952 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-8hjsh" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.374987 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.375624 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/764035e6-f447-4e19-a17f-c334e6270ba6-webhook-cert\") pod \"metallb-operator-controller-manager-5b64bfcc84-9fvrf\" (UID: \"764035e6-f447-4e19-a17f-c334e6270ba6\") " pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.376771 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/764035e6-f447-4e19-a17f-c334e6270ba6-apiservice-cert\") pod \"metallb-operator-controller-manager-5b64bfcc84-9fvrf\" (UID: \"764035e6-f447-4e19-a17f-c334e6270ba6\") " pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.402774 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb"] Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.412109 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxp44\" (UniqueName: \"kubernetes.io/projected/764035e6-f447-4e19-a17f-c334e6270ba6-kube-api-access-qxp44\") pod \"metallb-operator-controller-manager-5b64bfcc84-9fvrf\" (UID: \"764035e6-f447-4e19-a17f-c334e6270ba6\") " pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.428077 4592 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.470773 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7290d1e2-eecb-4663-8c34-66c35acc0726-webhook-cert\") pod \"metallb-operator-webhook-server-5c7645bc9b-vdzjb\" (UID: \"7290d1e2-eecb-4663-8c34-66c35acc0726\") " pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.470831 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxkrs\" (UniqueName: \"kubernetes.io/projected/7290d1e2-eecb-4663-8c34-66c35acc0726-kube-api-access-pxkrs\") pod \"metallb-operator-webhook-server-5c7645bc9b-vdzjb\" (UID: \"7290d1e2-eecb-4663-8c34-66c35acc0726\") " pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.470893 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7290d1e2-eecb-4663-8c34-66c35acc0726-apiservice-cert\") pod \"metallb-operator-webhook-server-5c7645bc9b-vdzjb\" (UID: \"7290d1e2-eecb-4663-8c34-66c35acc0726\") " pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.581659 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7290d1e2-eecb-4663-8c34-66c35acc0726-webhook-cert\") pod \"metallb-operator-webhook-server-5c7645bc9b-vdzjb\" (UID: \"7290d1e2-eecb-4663-8c34-66c35acc0726\") " pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.581727 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxkrs\" (UniqueName: \"kubernetes.io/projected/7290d1e2-eecb-4663-8c34-66c35acc0726-kube-api-access-pxkrs\") pod \"metallb-operator-webhook-server-5c7645bc9b-vdzjb\" (UID: \"7290d1e2-eecb-4663-8c34-66c35acc0726\") " pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.581836 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7290d1e2-eecb-4663-8c34-66c35acc0726-apiservice-cert\") pod \"metallb-operator-webhook-server-5c7645bc9b-vdzjb\" (UID: \"7290d1e2-eecb-4663-8c34-66c35acc0726\") " pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.587007 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7290d1e2-eecb-4663-8c34-66c35acc0726-apiservice-cert\") pod \"metallb-operator-webhook-server-5c7645bc9b-vdzjb\" (UID: \"7290d1e2-eecb-4663-8c34-66c35acc0726\") " pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.590764 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7290d1e2-eecb-4663-8c34-66c35acc0726-webhook-cert\") pod \"metallb-operator-webhook-server-5c7645bc9b-vdzjb\" (UID: 
\"7290d1e2-eecb-4663-8c34-66c35acc0726\") " pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.609987 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxkrs\" (UniqueName: \"kubernetes.io/projected/7290d1e2-eecb-4663-8c34-66c35acc0726-kube-api-access-pxkrs\") pod \"metallb-operator-webhook-server-5c7645bc9b-vdzjb\" (UID: \"7290d1e2-eecb-4663-8c34-66c35acc0726\") " pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.796015 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:16 crc kubenswrapper[4592]: I0929 17:04:16.957749 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf"] Sep 29 17:04:16 crc kubenswrapper[4592]: W0929 17:04:16.963436 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod764035e6_f447_4e19_a17f_c334e6270ba6.slice/crio-2997dd6b23911a9722fa1f365a472fc80670f4b75518db408881b06240b05566 WatchSource:0}: Error finding container 2997dd6b23911a9722fa1f365a472fc80670f4b75518db408881b06240b05566: Status 404 returned error can't find the container with id 2997dd6b23911a9722fa1f365a472fc80670f4b75518db408881b06240b05566 Sep 29 17:04:17 crc kubenswrapper[4592]: I0929 17:04:17.252446 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb"] Sep 29 17:04:17 crc kubenswrapper[4592]: W0929 17:04:17.260318 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7290d1e2_eecb_4663_8c34_66c35acc0726.slice/crio-b4d64c5bc0dc39cf6e0894d92e0a567a433b3777ba4fd9b684abf5d1042d5b90 WatchSource:0}: Error finding container b4d64c5bc0dc39cf6e0894d92e0a567a433b3777ba4fd9b684abf5d1042d5b90: Status 404 returned error can't find the container with id b4d64c5bc0dc39cf6e0894d92e0a567a433b3777ba4fd9b684abf5d1042d5b90 Sep 29 17:04:17 crc kubenswrapper[4592]: I0929 17:04:17.284270 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf" event={"ID":"764035e6-f447-4e19-a17f-c334e6270ba6","Type":"ContainerStarted","Data":"2997dd6b23911a9722fa1f365a472fc80670f4b75518db408881b06240b05566"} Sep 29 17:04:17 crc kubenswrapper[4592]: I0929 17:04:17.285655 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" event={"ID":"7290d1e2-eecb-4663-8c34-66c35acc0726","Type":"ContainerStarted","Data":"b4d64c5bc0dc39cf6e0894d92e0a567a433b3777ba4fd9b684abf5d1042d5b90"} Sep 29 17:04:21 crc kubenswrapper[4592]: I0929 17:04:21.316768 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf" event={"ID":"764035e6-f447-4e19-a17f-c334e6270ba6","Type":"ContainerStarted","Data":"995d9c44966f60e9c4129da0dff4a1c366824094633c7588ec7ad6f3344cde07"} Sep 29 17:04:21 crc kubenswrapper[4592]: I0929 17:04:21.320179 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf" Sep 29 17:04:21 crc kubenswrapper[4592]: I0929 17:04:21.347556 4592 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf" podStartSLOduration=1.655764692 podStartE2EDuration="5.347537825s" podCreationTimestamp="2025-09-29 17:04:16 +0000 UTC" firstStartedPulling="2025-09-29 17:04:16.970801992 +0000 UTC m=+787.118579673" lastFinishedPulling="2025-09-29 17:04:20.662575135 +0000 UTC m=+790.810352806" observedRunningTime="2025-09-29 17:04:21.345343226 +0000 UTC m=+791.493120907" watchObservedRunningTime="2025-09-29 17:04:21.347537825 +0000 UTC m=+791.495315506" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.114797 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rqwbr"] Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.116340 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.141370 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rqwbr"] Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.288418 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe88c758-d31d-466a-98b8-41d8ed5668ce-catalog-content\") pod \"community-operators-rqwbr\" (UID: \"fe88c758-d31d-466a-98b8-41d8ed5668ce\") " pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.288520 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe88c758-d31d-466a-98b8-41d8ed5668ce-utilities\") pod \"community-operators-rqwbr\" (UID: \"fe88c758-d31d-466a-98b8-41d8ed5668ce\") " pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.288540 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr2wq\" (UniqueName: \"kubernetes.io/projected/fe88c758-d31d-466a-98b8-41d8ed5668ce-kube-api-access-lr2wq\") pod \"community-operators-rqwbr\" (UID: \"fe88c758-d31d-466a-98b8-41d8ed5668ce\") " pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.335700 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" event={"ID":"7290d1e2-eecb-4663-8c34-66c35acc0726","Type":"ContainerStarted","Data":"60ae0d8d794098f3c7bef852d9f0c8229f103721605fc1e632f13d7f3665356e"} Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.335755 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.368880 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" podStartSLOduration=1.9276982070000002 podStartE2EDuration="7.368860726s" podCreationTimestamp="2025-09-29 17:04:16 +0000 UTC" firstStartedPulling="2025-09-29 17:04:17.263371102 +0000 UTC m=+787.411148783" lastFinishedPulling="2025-09-29 17:04:22.704533621 +0000 UTC m=+792.852311302" observedRunningTime="2025-09-29 17:04:23.367082519 +0000 UTC m=+793.514860210" watchObservedRunningTime="2025-09-29 17:04:23.368860726 +0000 UTC 
m=+793.516638407" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.390021 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe88c758-d31d-466a-98b8-41d8ed5668ce-catalog-content\") pod \"community-operators-rqwbr\" (UID: \"fe88c758-d31d-466a-98b8-41d8ed5668ce\") " pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.390516 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe88c758-d31d-466a-98b8-41d8ed5668ce-catalog-content\") pod \"community-operators-rqwbr\" (UID: \"fe88c758-d31d-466a-98b8-41d8ed5668ce\") " pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.390764 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe88c758-d31d-466a-98b8-41d8ed5668ce-utilities\") pod \"community-operators-rqwbr\" (UID: \"fe88c758-d31d-466a-98b8-41d8ed5668ce\") " pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.391043 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe88c758-d31d-466a-98b8-41d8ed5668ce-utilities\") pod \"community-operators-rqwbr\" (UID: \"fe88c758-d31d-466a-98b8-41d8ed5668ce\") " pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.391086 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr2wq\" (UniqueName: \"kubernetes.io/projected/fe88c758-d31d-466a-98b8-41d8ed5668ce-kube-api-access-lr2wq\") pod \"community-operators-rqwbr\" (UID: \"fe88c758-d31d-466a-98b8-41d8ed5668ce\") " pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.425430 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr2wq\" (UniqueName: \"kubernetes.io/projected/fe88c758-d31d-466a-98b8-41d8ed5668ce-kube-api-access-lr2wq\") pod \"community-operators-rqwbr\" (UID: \"fe88c758-d31d-466a-98b8-41d8ed5668ce\") " pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.431419 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:23 crc kubenswrapper[4592]: I0929 17:04:23.803484 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rqwbr"] Sep 29 17:04:24 crc kubenswrapper[4592]: I0929 17:04:24.341875 4592 generic.go:334] "Generic (PLEG): container finished" podID="fe88c758-d31d-466a-98b8-41d8ed5668ce" containerID="943d03290b56a683b64907cdf1d78f7c17b194123ab16c1fa14fd99325b8fbec" exitCode=0 Sep 29 17:04:24 crc kubenswrapper[4592]: I0929 17:04:24.341978 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rqwbr" event={"ID":"fe88c758-d31d-466a-98b8-41d8ed5668ce","Type":"ContainerDied","Data":"943d03290b56a683b64907cdf1d78f7c17b194123ab16c1fa14fd99325b8fbec"} Sep 29 17:04:24 crc kubenswrapper[4592]: I0929 17:04:24.343235 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rqwbr" event={"ID":"fe88c758-d31d-466a-98b8-41d8ed5668ce","Type":"ContainerStarted","Data":"16d2c7181f8237c9fdb50c1f3428d8e38d5eb5ce5e9c94c56d2b949c2c8aea42"} Sep 29 17:04:25 crc kubenswrapper[4592]: I0929 17:04:25.349677 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rqwbr" event={"ID":"fe88c758-d31d-466a-98b8-41d8ed5668ce","Type":"ContainerStarted","Data":"32bf6cfa6eef5495c7eb118bb421c6dd523cc544a7551503ab37ce6d8afc1202"} Sep 29 17:04:26 crc kubenswrapper[4592]: I0929 17:04:26.355878 4592 generic.go:334] "Generic (PLEG): container finished" podID="fe88c758-d31d-466a-98b8-41d8ed5668ce" containerID="32bf6cfa6eef5495c7eb118bb421c6dd523cc544a7551503ab37ce6d8afc1202" exitCode=0 Sep 29 17:04:26 crc kubenswrapper[4592]: I0929 17:04:26.355929 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rqwbr" event={"ID":"fe88c758-d31d-466a-98b8-41d8ed5668ce","Type":"ContainerDied","Data":"32bf6cfa6eef5495c7eb118bb421c6dd523cc544a7551503ab37ce6d8afc1202"} Sep 29 17:04:27 crc kubenswrapper[4592]: I0929 17:04:27.364764 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rqwbr" event={"ID":"fe88c758-d31d-466a-98b8-41d8ed5668ce","Type":"ContainerStarted","Data":"e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9"} Sep 29 17:04:27 crc kubenswrapper[4592]: I0929 17:04:27.395357 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rqwbr" podStartSLOduration=1.9160090520000002 podStartE2EDuration="4.395333432s" podCreationTimestamp="2025-09-29 17:04:23 +0000 UTC" firstStartedPulling="2025-09-29 17:04:24.343418423 +0000 UTC m=+794.491196104" lastFinishedPulling="2025-09-29 17:04:26.822742803 +0000 UTC m=+796.970520484" observedRunningTime="2025-09-29 17:04:27.388229912 +0000 UTC m=+797.536007643" watchObservedRunningTime="2025-09-29 17:04:27.395333432 +0000 UTC m=+797.543111133" Sep 29 17:04:30 crc kubenswrapper[4592]: I0929 17:04:30.883773 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:04:30 crc kubenswrapper[4592]: I0929 17:04:30.884040 4592 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:04:33 crc kubenswrapper[4592]: I0929 17:04:33.433027 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:33 crc kubenswrapper[4592]: I0929 17:04:33.433436 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:33 crc kubenswrapper[4592]: I0929 17:04:33.475325 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:34 crc kubenswrapper[4592]: I0929 17:04:34.453053 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:35 crc kubenswrapper[4592]: I0929 17:04:35.506463 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rqwbr"] Sep 29 17:04:36 crc kubenswrapper[4592]: I0929 17:04:36.411821 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rqwbr" podUID="fe88c758-d31d-466a-98b8-41d8ed5668ce" containerName="registry-server" containerID="cri-o://e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9" gracePeriod=2 Sep 29 17:04:36 crc kubenswrapper[4592]: I0929 17:04:36.800948 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5c7645bc9b-vdzjb" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.366244 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.430394 4592 generic.go:334] "Generic (PLEG): container finished" podID="fe88c758-d31d-466a-98b8-41d8ed5668ce" containerID="e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9" exitCode=0 Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.430434 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rqwbr" event={"ID":"fe88c758-d31d-466a-98b8-41d8ed5668ce","Type":"ContainerDied","Data":"e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9"} Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.430459 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rqwbr" event={"ID":"fe88c758-d31d-466a-98b8-41d8ed5668ce","Type":"ContainerDied","Data":"16d2c7181f8237c9fdb50c1f3428d8e38d5eb5ce5e9c94c56d2b949c2c8aea42"} Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.430475 4592 scope.go:117] "RemoveContainer" containerID="e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.430638 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rqwbr" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.453892 4592 scope.go:117] "RemoveContainer" containerID="32bf6cfa6eef5495c7eb118bb421c6dd523cc544a7551503ab37ce6d8afc1202" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.475728 4592 scope.go:117] "RemoveContainer" containerID="943d03290b56a683b64907cdf1d78f7c17b194123ab16c1fa14fd99325b8fbec" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.481467 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe88c758-d31d-466a-98b8-41d8ed5668ce-utilities\") pod \"fe88c758-d31d-466a-98b8-41d8ed5668ce\" (UID: \"fe88c758-d31d-466a-98b8-41d8ed5668ce\") " Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.481522 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lr2wq\" (UniqueName: \"kubernetes.io/projected/fe88c758-d31d-466a-98b8-41d8ed5668ce-kube-api-access-lr2wq\") pod \"fe88c758-d31d-466a-98b8-41d8ed5668ce\" (UID: \"fe88c758-d31d-466a-98b8-41d8ed5668ce\") " Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.481606 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe88c758-d31d-466a-98b8-41d8ed5668ce-catalog-content\") pod \"fe88c758-d31d-466a-98b8-41d8ed5668ce\" (UID: \"fe88c758-d31d-466a-98b8-41d8ed5668ce\") " Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.482607 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe88c758-d31d-466a-98b8-41d8ed5668ce-utilities" (OuterVolumeSpecName: "utilities") pod "fe88c758-d31d-466a-98b8-41d8ed5668ce" (UID: "fe88c758-d31d-466a-98b8-41d8ed5668ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.494787 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe88c758-d31d-466a-98b8-41d8ed5668ce-kube-api-access-lr2wq" (OuterVolumeSpecName: "kube-api-access-lr2wq") pod "fe88c758-d31d-466a-98b8-41d8ed5668ce" (UID: "fe88c758-d31d-466a-98b8-41d8ed5668ce"). InnerVolumeSpecName "kube-api-access-lr2wq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.503325 4592 scope.go:117] "RemoveContainer" containerID="e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9" Sep 29 17:04:37 crc kubenswrapper[4592]: E0929 17:04:37.503905 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9\": container with ID starting with e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9 not found: ID does not exist" containerID="e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.504024 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9"} err="failed to get container status \"e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9\": rpc error: code = NotFound desc = could not find container \"e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9\": container with ID starting with e979229be6049cae2680c3e1607359a53ec4ff75dc2874645b789c8aea0c51c9 not found: ID does not exist" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.504126 4592 scope.go:117] "RemoveContainer" containerID="32bf6cfa6eef5495c7eb118bb421c6dd523cc544a7551503ab37ce6d8afc1202" Sep 29 17:04:37 crc kubenswrapper[4592]: E0929 17:04:37.505172 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32bf6cfa6eef5495c7eb118bb421c6dd523cc544a7551503ab37ce6d8afc1202\": container with ID starting with 32bf6cfa6eef5495c7eb118bb421c6dd523cc544a7551503ab37ce6d8afc1202 not found: ID does not exist" containerID="32bf6cfa6eef5495c7eb118bb421c6dd523cc544a7551503ab37ce6d8afc1202" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.505211 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32bf6cfa6eef5495c7eb118bb421c6dd523cc544a7551503ab37ce6d8afc1202"} err="failed to get container status \"32bf6cfa6eef5495c7eb118bb421c6dd523cc544a7551503ab37ce6d8afc1202\": rpc error: code = NotFound desc = could not find container \"32bf6cfa6eef5495c7eb118bb421c6dd523cc544a7551503ab37ce6d8afc1202\": container with ID starting with 32bf6cfa6eef5495c7eb118bb421c6dd523cc544a7551503ab37ce6d8afc1202 not found: ID does not exist" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.505237 4592 scope.go:117] "RemoveContainer" containerID="943d03290b56a683b64907cdf1d78f7c17b194123ab16c1fa14fd99325b8fbec" Sep 29 17:04:37 crc kubenswrapper[4592]: E0929 17:04:37.507638 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"943d03290b56a683b64907cdf1d78f7c17b194123ab16c1fa14fd99325b8fbec\": container with ID starting with 943d03290b56a683b64907cdf1d78f7c17b194123ab16c1fa14fd99325b8fbec not found: ID does not exist" containerID="943d03290b56a683b64907cdf1d78f7c17b194123ab16c1fa14fd99325b8fbec" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.507673 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"943d03290b56a683b64907cdf1d78f7c17b194123ab16c1fa14fd99325b8fbec"} err="failed to get container status \"943d03290b56a683b64907cdf1d78f7c17b194123ab16c1fa14fd99325b8fbec\": rpc error: code = NotFound desc = could not 
find container \"943d03290b56a683b64907cdf1d78f7c17b194123ab16c1fa14fd99325b8fbec\": container with ID starting with 943d03290b56a683b64907cdf1d78f7c17b194123ab16c1fa14fd99325b8fbec not found: ID does not exist" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.556016 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe88c758-d31d-466a-98b8-41d8ed5668ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe88c758-d31d-466a-98b8-41d8ed5668ce" (UID: "fe88c758-d31d-466a-98b8-41d8ed5668ce"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.582658 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe88c758-d31d-466a-98b8-41d8ed5668ce-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.582858 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lr2wq\" (UniqueName: \"kubernetes.io/projected/fe88c758-d31d-466a-98b8-41d8ed5668ce-kube-api-access-lr2wq\") on node \"crc\" DevicePath \"\"" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.582918 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe88c758-d31d-466a-98b8-41d8ed5668ce-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.764079 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rqwbr"] Sep 29 17:04:37 crc kubenswrapper[4592]: I0929 17:04:37.767681 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rqwbr"] Sep 29 17:04:39 crc kubenswrapper[4592]: I0929 17:04:39.190690 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe88c758-d31d-466a-98b8-41d8ed5668ce" path="/var/lib/kubelet/pods/fe88c758-d31d-466a-98b8-41d8ed5668ce/volumes" Sep 29 17:04:56 crc kubenswrapper[4592]: I0929 17:04:56.431351 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5b64bfcc84-9fvrf" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.181100 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-drlk6"] Sep 29 17:04:57 crc kubenswrapper[4592]: E0929 17:04:57.181398 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe88c758-d31d-466a-98b8-41d8ed5668ce" containerName="extract-content" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.181423 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe88c758-d31d-466a-98b8-41d8ed5668ce" containerName="extract-content" Sep 29 17:04:57 crc kubenswrapper[4592]: E0929 17:04:57.181443 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe88c758-d31d-466a-98b8-41d8ed5668ce" containerName="registry-server" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.181453 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe88c758-d31d-466a-98b8-41d8ed5668ce" containerName="registry-server" Sep 29 17:04:57 crc kubenswrapper[4592]: E0929 17:04:57.181472 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe88c758-d31d-466a-98b8-41d8ed5668ce" containerName="extract-utilities" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.181481 4592 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="fe88c758-d31d-466a-98b8-41d8ed5668ce" containerName="extract-utilities" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.181605 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe88c758-d31d-466a-98b8-41d8ed5668ce" containerName="registry-server" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.184063 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.190537 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.190710 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.190780 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-jnxts" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.214568 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2"] Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.219182 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.224936 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.229100 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2"] Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.320270 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-z794s"] Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.321394 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-z794s" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.325910 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.326112 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.326127 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-swlk4" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.326195 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.326498 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfzm9\" (UniqueName: \"kubernetes.io/projected/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-kube-api-access-jfzm9\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.326554 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-frr-conf\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.326616 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-metrics\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.326999 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-frr-startup\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.327363 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-frr-sockets\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.327436 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2c5260c1-19c0-4d8b-b659-5a09f3a887da-cert\") pod \"frr-k8s-webhook-server-5478bdb765-lrvw2\" (UID: \"2c5260c1-19c0-4d8b-b659-5a09f3a887da\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.327465 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-reloader\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.327718 4592 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-metrics-certs\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.327775 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm5dt\" (UniqueName: \"kubernetes.io/projected/2c5260c1-19c0-4d8b-b659-5a09f3a887da-kube-api-access-wm5dt\") pod \"frr-k8s-webhook-server-5478bdb765-lrvw2\" (UID: \"2c5260c1-19c0-4d8b-b659-5a09f3a887da\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.331724 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-jzgm6"] Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.332910 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.338899 4592 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.354567 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-jzgm6"] Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.428896 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8634039b-db7c-46c2-a140-a746270aa768-memberlist\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.428958 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-frr-conf\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.428983 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-metrics\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.429039 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-frr-startup\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.429065 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw2lc\" (UniqueName: \"kubernetes.io/projected/8634039b-db7c-46c2-a140-a746270aa768-kube-api-access-bw2lc\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.429090 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8634039b-db7c-46c2-a140-a746270aa768-metrics-certs\") pod \"speaker-z794s\" (UID: 
\"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.429116 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-frr-sockets\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.429160 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2c5260c1-19c0-4d8b-b659-5a09f3a887da-cert\") pod \"frr-k8s-webhook-server-5478bdb765-lrvw2\" (UID: \"2c5260c1-19c0-4d8b-b659-5a09f3a887da\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.429194 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-reloader\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.429220 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8634039b-db7c-46c2-a140-a746270aa768-metallb-excludel2\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.429254 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-metrics-certs\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.429281 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm5dt\" (UniqueName: \"kubernetes.io/projected/2c5260c1-19c0-4d8b-b659-5a09f3a887da-kube-api-access-wm5dt\") pod \"frr-k8s-webhook-server-5478bdb765-lrvw2\" (UID: \"2c5260c1-19c0-4d8b-b659-5a09f3a887da\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.429319 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfzm9\" (UniqueName: \"kubernetes.io/projected/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-kube-api-access-jfzm9\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.429530 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-metrics\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.430274 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-frr-conf\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.430433 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-frr-startup\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.430636 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-reloader\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.436836 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-metrics-certs\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.436890 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2c5260c1-19c0-4d8b-b659-5a09f3a887da-cert\") pod \"frr-k8s-webhook-server-5478bdb765-lrvw2\" (UID: \"2c5260c1-19c0-4d8b-b659-5a09f3a887da\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.440930 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-frr-sockets\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.453494 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm5dt\" (UniqueName: \"kubernetes.io/projected/2c5260c1-19c0-4d8b-b659-5a09f3a887da-kube-api-access-wm5dt\") pod \"frr-k8s-webhook-server-5478bdb765-lrvw2\" (UID: \"2c5260c1-19c0-4d8b-b659-5a09f3a887da\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.457613 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfzm9\" (UniqueName: \"kubernetes.io/projected/8fa3e452-ce03-4b76-812f-cc9ff86f9b10-kube-api-access-jfzm9\") pod \"frr-k8s-drlk6\" (UID: \"8fa3e452-ce03-4b76-812f-cc9ff86f9b10\") " pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.501454 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-drlk6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.531047 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw2lc\" (UniqueName: \"kubernetes.io/projected/8634039b-db7c-46c2-a140-a746270aa768-kube-api-access-bw2lc\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.531327 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8634039b-db7c-46c2-a140-a746270aa768-metrics-certs\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.531465 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2xdf\" (UniqueName: \"kubernetes.io/projected/560cbf05-ef23-4767-bd5c-eabd7f3eb864-kube-api-access-d2xdf\") pod \"controller-5d688f5ffc-jzgm6\" (UID: \"560cbf05-ef23-4767-bd5c-eabd7f3eb864\") " pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.531690 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8634039b-db7c-46c2-a140-a746270aa768-metallb-excludel2\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.531903 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/560cbf05-ef23-4767-bd5c-eabd7f3eb864-metrics-certs\") pod \"controller-5d688f5ffc-jzgm6\" (UID: \"560cbf05-ef23-4767-bd5c-eabd7f3eb864\") " pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.532165 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/560cbf05-ef23-4767-bd5c-eabd7f3eb864-cert\") pod \"controller-5d688f5ffc-jzgm6\" (UID: \"560cbf05-ef23-4767-bd5c-eabd7f3eb864\") " pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.532344 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8634039b-db7c-46c2-a140-a746270aa768-memberlist\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:57 crc kubenswrapper[4592]: E0929 17:04:57.532580 4592 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 17:04:57 crc kubenswrapper[4592]: E0929 17:04:57.532807 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8634039b-db7c-46c2-a140-a746270aa768-memberlist podName:8634039b-db7c-46c2-a140-a746270aa768 nodeName:}" failed. No retries permitted until 2025-09-29 17:04:58.032790833 +0000 UTC m=+828.180568514 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/8634039b-db7c-46c2-a140-a746270aa768-memberlist") pod "speaker-z794s" (UID: "8634039b-db7c-46c2-a140-a746270aa768") : secret "metallb-memberlist" not found Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.532644 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/8634039b-db7c-46c2-a140-a746270aa768-metallb-excludel2\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.534638 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8634039b-db7c-46c2-a140-a746270aa768-metrics-certs\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.541600 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.555897 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw2lc\" (UniqueName: \"kubernetes.io/projected/8634039b-db7c-46c2-a140-a746270aa768-kube-api-access-bw2lc\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.633833 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/560cbf05-ef23-4767-bd5c-eabd7f3eb864-cert\") pod \"controller-5d688f5ffc-jzgm6\" (UID: \"560cbf05-ef23-4767-bd5c-eabd7f3eb864\") " pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.633915 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2xdf\" (UniqueName: \"kubernetes.io/projected/560cbf05-ef23-4767-bd5c-eabd7f3eb864-kube-api-access-d2xdf\") pod \"controller-5d688f5ffc-jzgm6\" (UID: \"560cbf05-ef23-4767-bd5c-eabd7f3eb864\") " pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.633940 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/560cbf05-ef23-4767-bd5c-eabd7f3eb864-metrics-certs\") pod \"controller-5d688f5ffc-jzgm6\" (UID: \"560cbf05-ef23-4767-bd5c-eabd7f3eb864\") " pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.636791 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/560cbf05-ef23-4767-bd5c-eabd7f3eb864-cert\") pod \"controller-5d688f5ffc-jzgm6\" (UID: \"560cbf05-ef23-4767-bd5c-eabd7f3eb864\") " pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.636919 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/560cbf05-ef23-4767-bd5c-eabd7f3eb864-metrics-certs\") pod \"controller-5d688f5ffc-jzgm6\" (UID: \"560cbf05-ef23-4767-bd5c-eabd7f3eb864\") " pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.655682 4592 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-d2xdf\" (UniqueName: \"kubernetes.io/projected/560cbf05-ef23-4767-bd5c-eabd7f3eb864-kube-api-access-d2xdf\") pod \"controller-5d688f5ffc-jzgm6\" (UID: \"560cbf05-ef23-4767-bd5c-eabd7f3eb864\") " pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:04:57 crc kubenswrapper[4592]: I0929 17:04:57.703616 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:04:58 crc kubenswrapper[4592]: I0929 17:04:58.003117 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2"] Sep 29 17:04:58 crc kubenswrapper[4592]: W0929 17:04:58.006008 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c5260c1_19c0_4d8b_b659_5a09f3a887da.slice/crio-c3a3ef12db9c09832a7096576f2c587134e35923cb3c64eb5ff758787adb4724 WatchSource:0}: Error finding container c3a3ef12db9c09832a7096576f2c587134e35923cb3c64eb5ff758787adb4724: Status 404 returned error can't find the container with id c3a3ef12db9c09832a7096576f2c587134e35923cb3c64eb5ff758787adb4724 Sep 29 17:04:58 crc kubenswrapper[4592]: I0929 17:04:58.037733 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8634039b-db7c-46c2-a140-a746270aa768-memberlist\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:58 crc kubenswrapper[4592]: E0929 17:04:58.037925 4592 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Sep 29 17:04:58 crc kubenswrapper[4592]: E0929 17:04:58.038013 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8634039b-db7c-46c2-a140-a746270aa768-memberlist podName:8634039b-db7c-46c2-a140-a746270aa768 nodeName:}" failed. No retries permitted until 2025-09-29 17:04:59.037987963 +0000 UTC m=+829.185765644 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/8634039b-db7c-46c2-a140-a746270aa768-memberlist") pod "speaker-z794s" (UID: "8634039b-db7c-46c2-a140-a746270aa768") : secret "metallb-memberlist" not found Sep 29 17:04:58 crc kubenswrapper[4592]: I0929 17:04:58.124940 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-jzgm6"] Sep 29 17:04:58 crc kubenswrapper[4592]: W0929 17:04:58.131828 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod560cbf05_ef23_4767_bd5c_eabd7f3eb864.slice/crio-2c86ec8434c224cac77addc7803eaba66efe2cffebcdd8cc2b9d5cd52ca48604 WatchSource:0}: Error finding container 2c86ec8434c224cac77addc7803eaba66efe2cffebcdd8cc2b9d5cd52ca48604: Status 404 returned error can't find the container with id 2c86ec8434c224cac77addc7803eaba66efe2cffebcdd8cc2b9d5cd52ca48604 Sep 29 17:04:58 crc kubenswrapper[4592]: I0929 17:04:58.538451 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" event={"ID":"2c5260c1-19c0-4d8b-b659-5a09f3a887da","Type":"ContainerStarted","Data":"c3a3ef12db9c09832a7096576f2c587134e35923cb3c64eb5ff758787adb4724"} Sep 29 17:04:58 crc kubenswrapper[4592]: I0929 17:04:58.540708 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-jzgm6" event={"ID":"560cbf05-ef23-4767-bd5c-eabd7f3eb864","Type":"ContainerStarted","Data":"aba7133b44c3b90ba7c83dc58a342fc94f93ade9c55166be5a786e045daad1d3"} Sep 29 17:04:58 crc kubenswrapper[4592]: I0929 17:04:58.540753 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-jzgm6" event={"ID":"560cbf05-ef23-4767-bd5c-eabd7f3eb864","Type":"ContainerStarted","Data":"d2db46fbb034910b95615d1b8518ce99021c95faf4ca41cf245fa8049c38bd37"} Sep 29 17:04:58 crc kubenswrapper[4592]: I0929 17:04:58.540764 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-jzgm6" event={"ID":"560cbf05-ef23-4767-bd5c-eabd7f3eb864","Type":"ContainerStarted","Data":"2c86ec8434c224cac77addc7803eaba66efe2cffebcdd8cc2b9d5cd52ca48604"} Sep 29 17:04:58 crc kubenswrapper[4592]: I0929 17:04:58.542458 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-drlk6" event={"ID":"8fa3e452-ce03-4b76-812f-cc9ff86f9b10","Type":"ContainerStarted","Data":"74cb5117c4a729d8fcb97d712e19e50c780ad2b94ff48848c43b8443d3cab0a2"} Sep 29 17:04:59 crc kubenswrapper[4592]: I0929 17:04:59.049840 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8634039b-db7c-46c2-a140-a746270aa768-memberlist\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:59 crc kubenswrapper[4592]: I0929 17:04:59.055642 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/8634039b-db7c-46c2-a140-a746270aa768-memberlist\") pod \"speaker-z794s\" (UID: \"8634039b-db7c-46c2-a140-a746270aa768\") " pod="metallb-system/speaker-z794s" Sep 29 17:04:59 crc kubenswrapper[4592]: I0929 17:04:59.138657 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-z794s" Sep 29 17:04:59 crc kubenswrapper[4592]: W0929 17:04:59.166672 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8634039b_db7c_46c2_a140_a746270aa768.slice/crio-22cfcdab867af9e4c549b8351e3d6eaa252844188d46f39cbea3c3408d6cc0f1 WatchSource:0}: Error finding container 22cfcdab867af9e4c549b8351e3d6eaa252844188d46f39cbea3c3408d6cc0f1: Status 404 returned error can't find the container with id 22cfcdab867af9e4c549b8351e3d6eaa252844188d46f39cbea3c3408d6cc0f1 Sep 29 17:04:59 crc kubenswrapper[4592]: I0929 17:04:59.548042 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-z794s" event={"ID":"8634039b-db7c-46c2-a140-a746270aa768","Type":"ContainerStarted","Data":"fb602b693d765f4a674de09e48dccefc403d7d588ebda79e5e3515b2fd62cc38"} Sep 29 17:04:59 crc kubenswrapper[4592]: I0929 17:04:59.548089 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-z794s" event={"ID":"8634039b-db7c-46c2-a140-a746270aa768","Type":"ContainerStarted","Data":"22cfcdab867af9e4c549b8351e3d6eaa252844188d46f39cbea3c3408d6cc0f1"} Sep 29 17:04:59 crc kubenswrapper[4592]: I0929 17:04:59.548196 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:05:00 crc kubenswrapper[4592]: I0929 17:05:00.575135 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-z794s" event={"ID":"8634039b-db7c-46c2-a140-a746270aa768","Type":"ContainerStarted","Data":"f008c3a0c6259687c13fdb26b04a5104a6e852a93716e107ce82de8ce4bf9af4"} Sep 29 17:05:00 crc kubenswrapper[4592]: I0929 17:05:00.575443 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-z794s" Sep 29 17:05:00 crc kubenswrapper[4592]: I0929 17:05:00.599935 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-z794s" podStartSLOduration=3.599919349 podStartE2EDuration="3.599919349s" podCreationTimestamp="2025-09-29 17:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:05:00.598069154 +0000 UTC m=+830.745846835" watchObservedRunningTime="2025-09-29 17:05:00.599919349 +0000 UTC m=+830.747697030" Sep 29 17:05:00 crc kubenswrapper[4592]: I0929 17:05:00.601862 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-jzgm6" podStartSLOduration=3.6018531449999998 podStartE2EDuration="3.601853145s" podCreationTimestamp="2025-09-29 17:04:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:04:59.565797523 +0000 UTC m=+829.713575204" watchObservedRunningTime="2025-09-29 17:05:00.601853145 +0000 UTC m=+830.749630826" Sep 29 17:05:00 crc kubenswrapper[4592]: I0929 17:05:00.883073 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:05:00 crc kubenswrapper[4592]: I0929 17:05:00.883123 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" 
podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.169286 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-48s75"] Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.170716 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.195572 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-48s75"] Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.308402 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf1133e9-dac3-4e20-bfff-52e82025ec9a-catalog-content\") pod \"redhat-marketplace-48s75\" (UID: \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\") " pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.308531 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf1133e9-dac3-4e20-bfff-52e82025ec9a-utilities\") pod \"redhat-marketplace-48s75\" (UID: \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\") " pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.308568 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jglj\" (UniqueName: \"kubernetes.io/projected/cf1133e9-dac3-4e20-bfff-52e82025ec9a-kube-api-access-9jglj\") pod \"redhat-marketplace-48s75\" (UID: \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\") " pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.413761 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf1133e9-dac3-4e20-bfff-52e82025ec9a-utilities\") pod \"redhat-marketplace-48s75\" (UID: \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\") " pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.413814 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jglj\" (UniqueName: \"kubernetes.io/projected/cf1133e9-dac3-4e20-bfff-52e82025ec9a-kube-api-access-9jglj\") pod \"redhat-marketplace-48s75\" (UID: \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\") " pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.413851 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf1133e9-dac3-4e20-bfff-52e82025ec9a-catalog-content\") pod \"redhat-marketplace-48s75\" (UID: \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\") " pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.414240 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf1133e9-dac3-4e20-bfff-52e82025ec9a-catalog-content\") pod \"redhat-marketplace-48s75\" (UID: \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\") " pod="openshift-marketplace/redhat-marketplace-48s75" 
Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.414431 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf1133e9-dac3-4e20-bfff-52e82025ec9a-utilities\") pod \"redhat-marketplace-48s75\" (UID: \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\") " pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.435960 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jglj\" (UniqueName: \"kubernetes.io/projected/cf1133e9-dac3-4e20-bfff-52e82025ec9a-kube-api-access-9jglj\") pod \"redhat-marketplace-48s75\" (UID: \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\") " pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:01 crc kubenswrapper[4592]: I0929 17:05:01.487415 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:02 crc kubenswrapper[4592]: I0929 17:05:02.085632 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-48s75"] Sep 29 17:05:02 crc kubenswrapper[4592]: I0929 17:05:02.591663 4592 generic.go:334] "Generic (PLEG): container finished" podID="cf1133e9-dac3-4e20-bfff-52e82025ec9a" containerID="5a2aacac26d5bb119f4cd182a51af2332ffaf61a7a38897f13685352d1a71a9b" exitCode=0 Sep 29 17:05:02 crc kubenswrapper[4592]: I0929 17:05:02.591945 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48s75" event={"ID":"cf1133e9-dac3-4e20-bfff-52e82025ec9a","Type":"ContainerDied","Data":"5a2aacac26d5bb119f4cd182a51af2332ffaf61a7a38897f13685352d1a71a9b"} Sep 29 17:05:02 crc kubenswrapper[4592]: I0929 17:05:02.591976 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48s75" event={"ID":"cf1133e9-dac3-4e20-bfff-52e82025ec9a","Type":"ContainerStarted","Data":"58d408ee5c0d4872069232096eb189e8127775bdda45f21834c69a186152d83e"} Sep 29 17:05:04 crc kubenswrapper[4592]: I0929 17:05:04.607221 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48s75" event={"ID":"cf1133e9-dac3-4e20-bfff-52e82025ec9a","Type":"ContainerStarted","Data":"18398182ab3d86710d89dccc59c580bc8b9453baf92a6e20aaa416df662489a2"} Sep 29 17:05:05 crc kubenswrapper[4592]: I0929 17:05:05.633692 4592 generic.go:334] "Generic (PLEG): container finished" podID="cf1133e9-dac3-4e20-bfff-52e82025ec9a" containerID="18398182ab3d86710d89dccc59c580bc8b9453baf92a6e20aaa416df662489a2" exitCode=0 Sep 29 17:05:05 crc kubenswrapper[4592]: I0929 17:05:05.633729 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48s75" event={"ID":"cf1133e9-dac3-4e20-bfff-52e82025ec9a","Type":"ContainerDied","Data":"18398182ab3d86710d89dccc59c580bc8b9453baf92a6e20aaa416df662489a2"} Sep 29 17:05:08 crc kubenswrapper[4592]: I0929 17:05:08.652840 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48s75" event={"ID":"cf1133e9-dac3-4e20-bfff-52e82025ec9a","Type":"ContainerStarted","Data":"34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356"} Sep 29 17:05:08 crc kubenswrapper[4592]: I0929 17:05:08.656646 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" 
event={"ID":"2c5260c1-19c0-4d8b-b659-5a09f3a887da","Type":"ContainerStarted","Data":"f3bda61a8835b6701879b48662db4ef679e4024e6985c6959dd7ab2e7a7b4664"} Sep 29 17:05:08 crc kubenswrapper[4592]: I0929 17:05:08.656746 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" Sep 29 17:05:08 crc kubenswrapper[4592]: I0929 17:05:08.660700 4592 generic.go:334] "Generic (PLEG): container finished" podID="8fa3e452-ce03-4b76-812f-cc9ff86f9b10" containerID="fa96a7030dec9a83d36201414e7dc694623a601f5e419dc4085b86672ab945c7" exitCode=0 Sep 29 17:05:08 crc kubenswrapper[4592]: I0929 17:05:08.660769 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-drlk6" event={"ID":"8fa3e452-ce03-4b76-812f-cc9ff86f9b10","Type":"ContainerDied","Data":"fa96a7030dec9a83d36201414e7dc694623a601f5e419dc4085b86672ab945c7"} Sep 29 17:05:08 crc kubenswrapper[4592]: I0929 17:05:08.676636 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-48s75" podStartSLOduration=2.451091441 podStartE2EDuration="7.676617544s" podCreationTimestamp="2025-09-29 17:05:01 +0000 UTC" firstStartedPulling="2025-09-29 17:05:02.593401843 +0000 UTC m=+832.741179524" lastFinishedPulling="2025-09-29 17:05:07.818927946 +0000 UTC m=+837.966705627" observedRunningTime="2025-09-29 17:05:08.675056798 +0000 UTC m=+838.822834479" watchObservedRunningTime="2025-09-29 17:05:08.676617544 +0000 UTC m=+838.824395225" Sep 29 17:05:08 crc kubenswrapper[4592]: I0929 17:05:08.730323 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" podStartSLOduration=2.055796341 podStartE2EDuration="11.730302444s" podCreationTimestamp="2025-09-29 17:04:57 +0000 UTC" firstStartedPulling="2025-09-29 17:04:58.007968615 +0000 UTC m=+828.155746296" lastFinishedPulling="2025-09-29 17:05:07.682474718 +0000 UTC m=+837.830252399" observedRunningTime="2025-09-29 17:05:08.723557796 +0000 UTC m=+838.871335477" watchObservedRunningTime="2025-09-29 17:05:08.730302444 +0000 UTC m=+838.878080125" Sep 29 17:05:09 crc kubenswrapper[4592]: I0929 17:05:09.144867 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-z794s" Sep 29 17:05:09 crc kubenswrapper[4592]: I0929 17:05:09.668685 4592 generic.go:334] "Generic (PLEG): container finished" podID="8fa3e452-ce03-4b76-812f-cc9ff86f9b10" containerID="f5a47b39ceb486ae441b5158031d3a6be19a7058d493fbc448de010cd788646e" exitCode=0 Sep 29 17:05:09 crc kubenswrapper[4592]: I0929 17:05:09.668755 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-drlk6" event={"ID":"8fa3e452-ce03-4b76-812f-cc9ff86f9b10","Type":"ContainerDied","Data":"f5a47b39ceb486ae441b5158031d3a6be19a7058d493fbc448de010cd788646e"} Sep 29 17:05:10 crc kubenswrapper[4592]: I0929 17:05:10.678187 4592 generic.go:334] "Generic (PLEG): container finished" podID="8fa3e452-ce03-4b76-812f-cc9ff86f9b10" containerID="5c7dc99635fd735b1e4212bddc3191ba4fa703578cedfcc12bf4fbe616a60e72" exitCode=0 Sep 29 17:05:10 crc kubenswrapper[4592]: I0929 17:05:10.678546 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-drlk6" event={"ID":"8fa3e452-ce03-4b76-812f-cc9ff86f9b10","Type":"ContainerDied","Data":"5c7dc99635fd735b1e4212bddc3191ba4fa703578cedfcc12bf4fbe616a60e72"} Sep 29 17:05:11 crc kubenswrapper[4592]: I0929 17:05:11.488450 4592 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:11 crc kubenswrapper[4592]: I0929 17:05:11.488798 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:11 crc kubenswrapper[4592]: I0929 17:05:11.622180 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:11 crc kubenswrapper[4592]: I0929 17:05:11.688062 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-drlk6" event={"ID":"8fa3e452-ce03-4b76-812f-cc9ff86f9b10","Type":"ContainerStarted","Data":"f567f1230b7325b748c151d88dfe38d6715afdd4457bda8a7e6d9e8037c7956b"} Sep 29 17:05:11 crc kubenswrapper[4592]: I0929 17:05:11.688130 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-drlk6" event={"ID":"8fa3e452-ce03-4b76-812f-cc9ff86f9b10","Type":"ContainerStarted","Data":"f9e70e16f1154d5e78958feeb21625d42ef03d97b269b27385c83807459cf03e"} Sep 29 17:05:11 crc kubenswrapper[4592]: I0929 17:05:11.688192 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-drlk6" event={"ID":"8fa3e452-ce03-4b76-812f-cc9ff86f9b10","Type":"ContainerStarted","Data":"682469e330af29773a96f5b3c0e6049b0a6f2bd4ce1b41b1a87b99ce35e8022a"} Sep 29 17:05:11 crc kubenswrapper[4592]: I0929 17:05:11.688208 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-drlk6" event={"ID":"8fa3e452-ce03-4b76-812f-cc9ff86f9b10","Type":"ContainerStarted","Data":"897e8fbf9c4c79301eae62b792a5a2f9f5d370f053c0971387d0296594b474ae"} Sep 29 17:05:11 crc kubenswrapper[4592]: I0929 17:05:11.688220 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-drlk6" event={"ID":"8fa3e452-ce03-4b76-812f-cc9ff86f9b10","Type":"ContainerStarted","Data":"2e6edfee2ec387a455d296594db601f71f719d8239f2c78226096e884d2223a9"} Sep 29 17:05:11 crc kubenswrapper[4592]: I0929 17:05:11.688231 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-drlk6" event={"ID":"8fa3e452-ce03-4b76-812f-cc9ff86f9b10","Type":"ContainerStarted","Data":"97faf776c6c3f6947fff1a2f068e195cb82177ccf3e10f7151c7e3f1ca3165b5"} Sep 29 17:05:11 crc kubenswrapper[4592]: I0929 17:05:11.713090 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-drlk6" podStartSLOduration=4.696671776 podStartE2EDuration="14.713071123s" podCreationTimestamp="2025-09-29 17:04:57 +0000 UTC" firstStartedPulling="2025-09-29 17:04:57.801616813 +0000 UTC m=+827.949394494" lastFinishedPulling="2025-09-29 17:05:07.81801616 +0000 UTC m=+837.965793841" observedRunningTime="2025-09-29 17:05:11.711463056 +0000 UTC m=+841.859240737" watchObservedRunningTime="2025-09-29 17:05:11.713071123 +0000 UTC m=+841.860848804" Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.297613 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-f7xhh"] Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.298428 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-f7xhh" Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.300186 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.300439 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-xgb2j" Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.306001 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.326905 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-f7xhh"] Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.374846 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg95w\" (UniqueName: \"kubernetes.io/projected/4a84df25-2b3e-471c-911f-0a2e050b350b-kube-api-access-mg95w\") pod \"openstack-operator-index-f7xhh\" (UID: \"4a84df25-2b3e-471c-911f-0a2e050b350b\") " pod="openstack-operators/openstack-operator-index-f7xhh" Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.476600 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg95w\" (UniqueName: \"kubernetes.io/projected/4a84df25-2b3e-471c-911f-0a2e050b350b-kube-api-access-mg95w\") pod \"openstack-operator-index-f7xhh\" (UID: \"4a84df25-2b3e-471c-911f-0a2e050b350b\") " pod="openstack-operators/openstack-operator-index-f7xhh" Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.504684 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-drlk6" Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.504911 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg95w\" (UniqueName: \"kubernetes.io/projected/4a84df25-2b3e-471c-911f-0a2e050b350b-kube-api-access-mg95w\") pod \"openstack-operator-index-f7xhh\" (UID: \"4a84df25-2b3e-471c-911f-0a2e050b350b\") " pod="openstack-operators/openstack-operator-index-f7xhh" Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.554725 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-drlk6" Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.618705 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-f7xhh" Sep 29 17:05:12 crc kubenswrapper[4592]: I0929 17:05:12.695729 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-drlk6" Sep 29 17:05:13 crc kubenswrapper[4592]: I0929 17:05:13.025928 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-f7xhh"] Sep 29 17:05:13 crc kubenswrapper[4592]: W0929 17:05:13.035240 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a84df25_2b3e_471c_911f_0a2e050b350b.slice/crio-315627324f081e27bd4df0048d1c4a502d284bb120729fa82d0ee4fe9c5aa2eb WatchSource:0}: Error finding container 315627324f081e27bd4df0048d1c4a502d284bb120729fa82d0ee4fe9c5aa2eb: Status 404 returned error can't find the container with id 315627324f081e27bd4df0048d1c4a502d284bb120729fa82d0ee4fe9c5aa2eb Sep 29 17:05:13 crc kubenswrapper[4592]: I0929 17:05:13.701798 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-f7xhh" event={"ID":"4a84df25-2b3e-471c-911f-0a2e050b350b","Type":"ContainerStarted","Data":"315627324f081e27bd4df0048d1c4a502d284bb120729fa82d0ee4fe9c5aa2eb"} Sep 29 17:05:15 crc kubenswrapper[4592]: I0929 17:05:15.652990 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-f7xhh"] Sep 29 17:05:16 crc kubenswrapper[4592]: I0929 17:05:16.258756 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-bc9lr"] Sep 29 17:05:16 crc kubenswrapper[4592]: I0929 17:05:16.259979 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-bc9lr" Sep 29 17:05:16 crc kubenswrapper[4592]: I0929 17:05:16.273442 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-bc9lr"] Sep 29 17:05:16 crc kubenswrapper[4592]: I0929 17:05:16.276856 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hhpm\" (UniqueName: \"kubernetes.io/projected/99fc0436-2ce9-4df7-ad2b-4ddb6dff9983-kube-api-access-6hhpm\") pod \"openstack-operator-index-bc9lr\" (UID: \"99fc0436-2ce9-4df7-ad2b-4ddb6dff9983\") " pod="openstack-operators/openstack-operator-index-bc9lr" Sep 29 17:05:16 crc kubenswrapper[4592]: I0929 17:05:16.378544 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hhpm\" (UniqueName: \"kubernetes.io/projected/99fc0436-2ce9-4df7-ad2b-4ddb6dff9983-kube-api-access-6hhpm\") pod \"openstack-operator-index-bc9lr\" (UID: \"99fc0436-2ce9-4df7-ad2b-4ddb6dff9983\") " pod="openstack-operators/openstack-operator-index-bc9lr" Sep 29 17:05:16 crc kubenswrapper[4592]: I0929 17:05:16.399553 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hhpm\" (UniqueName: \"kubernetes.io/projected/99fc0436-2ce9-4df7-ad2b-4ddb6dff9983-kube-api-access-6hhpm\") pod \"openstack-operator-index-bc9lr\" (UID: \"99fc0436-2ce9-4df7-ad2b-4ddb6dff9983\") " pod="openstack-operators/openstack-operator-index-bc9lr" Sep 29 17:05:16 crc kubenswrapper[4592]: I0929 17:05:16.583746 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-bc9lr" Sep 29 17:05:17 crc kubenswrapper[4592]: I0929 17:05:17.209825 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-bc9lr"] Sep 29 17:05:17 crc kubenswrapper[4592]: I0929 17:05:17.709634 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-jzgm6" Sep 29 17:05:17 crc kubenswrapper[4592]: I0929 17:05:17.727737 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-f7xhh" event={"ID":"4a84df25-2b3e-471c-911f-0a2e050b350b","Type":"ContainerStarted","Data":"11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40"} Sep 29 17:05:17 crc kubenswrapper[4592]: I0929 17:05:17.727881 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-f7xhh" podUID="4a84df25-2b3e-471c-911f-0a2e050b350b" containerName="registry-server" containerID="cri-o://11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40" gracePeriod=2 Sep 29 17:05:17 crc kubenswrapper[4592]: I0929 17:05:17.733197 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-bc9lr" event={"ID":"99fc0436-2ce9-4df7-ad2b-4ddb6dff9983","Type":"ContainerStarted","Data":"3835481f73e2f696daf8ed4c368536501e6ae6d98bd8fcf04cbe06b3f29c3850"} Sep 29 17:05:17 crc kubenswrapper[4592]: I0929 17:05:17.733444 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-bc9lr" event={"ID":"99fc0436-2ce9-4df7-ad2b-4ddb6dff9983","Type":"ContainerStarted","Data":"934ec3a711ef7882ee6e94d11be836e025c051862430918d7146059d44aa4029"} Sep 29 17:05:17 crc kubenswrapper[4592]: I0929 17:05:17.748067 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-f7xhh" podStartSLOduration=1.7050298860000002 podStartE2EDuration="5.748050698s" podCreationTimestamp="2025-09-29 17:05:12 +0000 UTC" firstStartedPulling="2025-09-29 17:05:13.037077281 +0000 UTC m=+843.184854962" lastFinishedPulling="2025-09-29 17:05:17.080098093 +0000 UTC m=+847.227875774" observedRunningTime="2025-09-29 17:05:17.747064363 +0000 UTC m=+847.894842044" watchObservedRunningTime="2025-09-29 17:05:17.748050698 +0000 UTC m=+847.895828379" Sep 29 17:05:17 crc kubenswrapper[4592]: I0929 17:05:17.763562 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-bc9lr" podStartSLOduration=1.603525133 podStartE2EDuration="1.763524117s" podCreationTimestamp="2025-09-29 17:05:16 +0000 UTC" firstStartedPulling="2025-09-29 17:05:17.236503893 +0000 UTC m=+847.384281574" lastFinishedPulling="2025-09-29 17:05:17.396502877 +0000 UTC m=+847.544280558" observedRunningTime="2025-09-29 17:05:17.76101017 +0000 UTC m=+847.908787861" watchObservedRunningTime="2025-09-29 17:05:17.763524117 +0000 UTC m=+847.911301798" Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.131747 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-f7xhh" Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.305445 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg95w\" (UniqueName: \"kubernetes.io/projected/4a84df25-2b3e-471c-911f-0a2e050b350b-kube-api-access-mg95w\") pod \"4a84df25-2b3e-471c-911f-0a2e050b350b\" (UID: \"4a84df25-2b3e-471c-911f-0a2e050b350b\") " Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.310485 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a84df25-2b3e-471c-911f-0a2e050b350b-kube-api-access-mg95w" (OuterVolumeSpecName: "kube-api-access-mg95w") pod "4a84df25-2b3e-471c-911f-0a2e050b350b" (UID: "4a84df25-2b3e-471c-911f-0a2e050b350b"). InnerVolumeSpecName "kube-api-access-mg95w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.406669 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg95w\" (UniqueName: \"kubernetes.io/projected/4a84df25-2b3e-471c-911f-0a2e050b350b-kube-api-access-mg95w\") on node \"crc\" DevicePath \"\"" Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.738535 4592 generic.go:334] "Generic (PLEG): container finished" podID="4a84df25-2b3e-471c-911f-0a2e050b350b" containerID="11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40" exitCode=0 Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.738630 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-f7xhh" Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.739032 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-f7xhh" event={"ID":"4a84df25-2b3e-471c-911f-0a2e050b350b","Type":"ContainerDied","Data":"11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40"} Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.739643 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-f7xhh" event={"ID":"4a84df25-2b3e-471c-911f-0a2e050b350b","Type":"ContainerDied","Data":"315627324f081e27bd4df0048d1c4a502d284bb120729fa82d0ee4fe9c5aa2eb"} Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.739781 4592 scope.go:117] "RemoveContainer" containerID="11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40" Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.763250 4592 scope.go:117] "RemoveContainer" containerID="11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40" Sep 29 17:05:18 crc kubenswrapper[4592]: E0929 17:05:18.763802 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40\": container with ID starting with 11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40 not found: ID does not exist" containerID="11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40" Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.763900 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40"} err="failed to get container status \"11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40\": rpc error: code = NotFound desc = could not find container 
\"11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40\": container with ID starting with 11b5b7b2e713052c4cda591276cd50eeaa7b125c7f6d606c1cc2a8fa7ac19b40 not found: ID does not exist" Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.775762 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-f7xhh"] Sep 29 17:05:18 crc kubenswrapper[4592]: I0929 17:05:18.781338 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-f7xhh"] Sep 29 17:05:19 crc kubenswrapper[4592]: I0929 17:05:19.192678 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a84df25-2b3e-471c-911f-0a2e050b350b" path="/var/lib/kubelet/pods/4a84df25-2b3e-471c-911f-0a2e050b350b/volumes" Sep 29 17:05:21 crc kubenswrapper[4592]: I0929 17:05:21.528807 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:23 crc kubenswrapper[4592]: I0929 17:05:23.851640 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-48s75"] Sep 29 17:05:23 crc kubenswrapper[4592]: I0929 17:05:23.851897 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-48s75" podUID="cf1133e9-dac3-4e20-bfff-52e82025ec9a" containerName="registry-server" containerID="cri-o://34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356" gracePeriod=2 Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.306482 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.391690 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jglj\" (UniqueName: \"kubernetes.io/projected/cf1133e9-dac3-4e20-bfff-52e82025ec9a-kube-api-access-9jglj\") pod \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\" (UID: \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\") " Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.397717 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf1133e9-dac3-4e20-bfff-52e82025ec9a-kube-api-access-9jglj" (OuterVolumeSpecName: "kube-api-access-9jglj") pod "cf1133e9-dac3-4e20-bfff-52e82025ec9a" (UID: "cf1133e9-dac3-4e20-bfff-52e82025ec9a"). InnerVolumeSpecName "kube-api-access-9jglj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.492591 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf1133e9-dac3-4e20-bfff-52e82025ec9a-utilities\") pod \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\" (UID: \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\") " Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.492653 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf1133e9-dac3-4e20-bfff-52e82025ec9a-catalog-content\") pod \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\" (UID: \"cf1133e9-dac3-4e20-bfff-52e82025ec9a\") " Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.492809 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jglj\" (UniqueName: \"kubernetes.io/projected/cf1133e9-dac3-4e20-bfff-52e82025ec9a-kube-api-access-9jglj\") on node \"crc\" DevicePath \"\"" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.493609 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf1133e9-dac3-4e20-bfff-52e82025ec9a-utilities" (OuterVolumeSpecName: "utilities") pod "cf1133e9-dac3-4e20-bfff-52e82025ec9a" (UID: "cf1133e9-dac3-4e20-bfff-52e82025ec9a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.503903 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf1133e9-dac3-4e20-bfff-52e82025ec9a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf1133e9-dac3-4e20-bfff-52e82025ec9a" (UID: "cf1133e9-dac3-4e20-bfff-52e82025ec9a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.594070 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf1133e9-dac3-4e20-bfff-52e82025ec9a-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.594133 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf1133e9-dac3-4e20-bfff-52e82025ec9a-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.776173 4592 generic.go:334] "Generic (PLEG): container finished" podID="cf1133e9-dac3-4e20-bfff-52e82025ec9a" containerID="34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356" exitCode=0 Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.776213 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48s75" event={"ID":"cf1133e9-dac3-4e20-bfff-52e82025ec9a","Type":"ContainerDied","Data":"34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356"} Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.776244 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-48s75" event={"ID":"cf1133e9-dac3-4e20-bfff-52e82025ec9a","Type":"ContainerDied","Data":"58d408ee5c0d4872069232096eb189e8127775bdda45f21834c69a186152d83e"} Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.776264 4592 scope.go:117] "RemoveContainer" containerID="34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.776300 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-48s75" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.799903 4592 scope.go:117] "RemoveContainer" containerID="18398182ab3d86710d89dccc59c580bc8b9453baf92a6e20aaa416df662489a2" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.821727 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-48s75"] Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.825438 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-48s75"] Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.825916 4592 scope.go:117] "RemoveContainer" containerID="5a2aacac26d5bb119f4cd182a51af2332ffaf61a7a38897f13685352d1a71a9b" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.841654 4592 scope.go:117] "RemoveContainer" containerID="34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356" Sep 29 17:05:24 crc kubenswrapper[4592]: E0929 17:05:24.842196 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356\": container with ID starting with 34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356 not found: ID does not exist" containerID="34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.842258 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356"} err="failed to get container status \"34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356\": rpc error: code = NotFound desc = could not find container \"34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356\": container with ID starting with 34cd78884fcb37b42cb9ca0e210064ec860bcbe0b88a2a28b01401edd9aaf356 not found: ID does not exist" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.842298 4592 scope.go:117] "RemoveContainer" containerID="18398182ab3d86710d89dccc59c580bc8b9453baf92a6e20aaa416df662489a2" Sep 29 17:05:24 crc kubenswrapper[4592]: E0929 17:05:24.842636 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18398182ab3d86710d89dccc59c580bc8b9453baf92a6e20aaa416df662489a2\": container with ID starting with 18398182ab3d86710d89dccc59c580bc8b9453baf92a6e20aaa416df662489a2 not found: ID does not exist" containerID="18398182ab3d86710d89dccc59c580bc8b9453baf92a6e20aaa416df662489a2" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.842672 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18398182ab3d86710d89dccc59c580bc8b9453baf92a6e20aaa416df662489a2"} err="failed to get container status \"18398182ab3d86710d89dccc59c580bc8b9453baf92a6e20aaa416df662489a2\": rpc error: code = NotFound desc = could not find container \"18398182ab3d86710d89dccc59c580bc8b9453baf92a6e20aaa416df662489a2\": container with ID starting with 18398182ab3d86710d89dccc59c580bc8b9453baf92a6e20aaa416df662489a2 not found: ID does not exist" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.842694 4592 scope.go:117] "RemoveContainer" containerID="5a2aacac26d5bb119f4cd182a51af2332ffaf61a7a38897f13685352d1a71a9b" Sep 29 17:05:24 crc kubenswrapper[4592]: E0929 17:05:24.842983 4592 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"5a2aacac26d5bb119f4cd182a51af2332ffaf61a7a38897f13685352d1a71a9b\": container with ID starting with 5a2aacac26d5bb119f4cd182a51af2332ffaf61a7a38897f13685352d1a71a9b not found: ID does not exist" containerID="5a2aacac26d5bb119f4cd182a51af2332ffaf61a7a38897f13685352d1a71a9b" Sep 29 17:05:24 crc kubenswrapper[4592]: I0929 17:05:24.843030 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a2aacac26d5bb119f4cd182a51af2332ffaf61a7a38897f13685352d1a71a9b"} err="failed to get container status \"5a2aacac26d5bb119f4cd182a51af2332ffaf61a7a38897f13685352d1a71a9b\": rpc error: code = NotFound desc = could not find container \"5a2aacac26d5bb119f4cd182a51af2332ffaf61a7a38897f13685352d1a71a9b\": container with ID starting with 5a2aacac26d5bb119f4cd182a51af2332ffaf61a7a38897f13685352d1a71a9b not found: ID does not exist" Sep 29 17:05:25 crc kubenswrapper[4592]: I0929 17:05:25.197779 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf1133e9-dac3-4e20-bfff-52e82025ec9a" path="/var/lib/kubelet/pods/cf1133e9-dac3-4e20-bfff-52e82025ec9a/volumes" Sep 29 17:05:26 crc kubenswrapper[4592]: I0929 17:05:26.585321 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-bc9lr" Sep 29 17:05:26 crc kubenswrapper[4592]: I0929 17:05:26.585370 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-bc9lr" Sep 29 17:05:26 crc kubenswrapper[4592]: I0929 17:05:26.608488 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-bc9lr" Sep 29 17:05:26 crc kubenswrapper[4592]: I0929 17:05:26.816315 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-bc9lr" Sep 29 17:05:27 crc kubenswrapper[4592]: I0929 17:05:27.505024 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-drlk6" Sep 29 17:05:27 crc kubenswrapper[4592]: I0929 17:05:27.549598 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-lrvw2" Sep 29 17:05:30 crc kubenswrapper[4592]: I0929 17:05:30.883266 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:05:30 crc kubenswrapper[4592]: I0929 17:05:30.884359 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:05:30 crc kubenswrapper[4592]: I0929 17:05:30.884449 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 17:05:30 crc kubenswrapper[4592]: I0929 17:05:30.885227 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"c27ed29df7ad1d8fe01e00a1b4d9831c1ed68234be9201ca2428b03bb210eaae"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 17:05:30 crc kubenswrapper[4592]: I0929 17:05:30.885312 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://c27ed29df7ad1d8fe01e00a1b4d9831c1ed68234be9201ca2428b03bb210eaae" gracePeriod=600 Sep 29 17:05:31 crc kubenswrapper[4592]: I0929 17:05:31.814320 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="c27ed29df7ad1d8fe01e00a1b4d9831c1ed68234be9201ca2428b03bb210eaae" exitCode=0 Sep 29 17:05:31 crc kubenswrapper[4592]: I0929 17:05:31.814509 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"c27ed29df7ad1d8fe01e00a1b4d9831c1ed68234be9201ca2428b03bb210eaae"} Sep 29 17:05:31 crc kubenswrapper[4592]: I0929 17:05:31.814877 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"d3bcef6cdb62fe4e0e330bc04d7fcf2a1a90ac24ed21caa15b239bee09c268e1"} Sep 29 17:05:31 crc kubenswrapper[4592]: I0929 17:05:31.814898 4592 scope.go:117] "RemoveContainer" containerID="7ed991bd0dfceb7b79c3a45affb7bd94e2ef89c514442956cd38fa47619e5780" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.524411 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx"] Sep 29 17:05:32 crc kubenswrapper[4592]: E0929 17:05:32.524693 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf1133e9-dac3-4e20-bfff-52e82025ec9a" containerName="extract-content" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.524709 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf1133e9-dac3-4e20-bfff-52e82025ec9a" containerName="extract-content" Sep 29 17:05:32 crc kubenswrapper[4592]: E0929 17:05:32.524724 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a84df25-2b3e-471c-911f-0a2e050b350b" containerName="registry-server" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.524732 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a84df25-2b3e-471c-911f-0a2e050b350b" containerName="registry-server" Sep 29 17:05:32 crc kubenswrapper[4592]: E0929 17:05:32.524745 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf1133e9-dac3-4e20-bfff-52e82025ec9a" containerName="extract-utilities" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.524752 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf1133e9-dac3-4e20-bfff-52e82025ec9a" containerName="extract-utilities" Sep 29 17:05:32 crc kubenswrapper[4592]: E0929 17:05:32.524771 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf1133e9-dac3-4e20-bfff-52e82025ec9a" containerName="registry-server" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.524779 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf1133e9-dac3-4e20-bfff-52e82025ec9a" 
containerName="registry-server" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.524891 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a84df25-2b3e-471c-911f-0a2e050b350b" containerName="registry-server" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.524907 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf1133e9-dac3-4e20-bfff-52e82025ec9a" containerName="registry-server" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.525827 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.529521 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-lj7jk" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.535185 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx"] Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.695802 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-util\") pod \"9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx\" (UID: \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\") " pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.695846 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmvw4\" (UniqueName: \"kubernetes.io/projected/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-kube-api-access-dmvw4\") pod \"9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx\" (UID: \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\") " pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.695889 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-bundle\") pod \"9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx\" (UID: \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\") " pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.796689 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-util\") pod \"9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx\" (UID: \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\") " pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.796735 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmvw4\" (UniqueName: \"kubernetes.io/projected/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-kube-api-access-dmvw4\") pod \"9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx\" (UID: \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\") " pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.796782 4592 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-bundle\") pod \"9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx\" (UID: \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\") " pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.797132 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-util\") pod \"9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx\" (UID: \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\") " pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.797224 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-bundle\") pod \"9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx\" (UID: \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\") " pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.819285 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmvw4\" (UniqueName: \"kubernetes.io/projected/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-kube-api-access-dmvw4\") pod \"9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx\" (UID: \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\") " pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:32 crc kubenswrapper[4592]: I0929 17:05:32.843586 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:33 crc kubenswrapper[4592]: I0929 17:05:33.238971 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx"] Sep 29 17:05:33 crc kubenswrapper[4592]: W0929 17:05:33.251336 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8bf4afc4_5581_49d5_853c_8309dd3ea1bd.slice/crio-9d809c7ac3e2c28a775f262de7807b53d58e52525fe5784793b0e20c96762f24 WatchSource:0}: Error finding container 9d809c7ac3e2c28a775f262de7807b53d58e52525fe5784793b0e20c96762f24: Status 404 returned error can't find the container with id 9d809c7ac3e2c28a775f262de7807b53d58e52525fe5784793b0e20c96762f24 Sep 29 17:05:33 crc kubenswrapper[4592]: I0929 17:05:33.830327 4592 generic.go:334] "Generic (PLEG): container finished" podID="8bf4afc4-5581-49d5-853c-8309dd3ea1bd" containerID="80435acc225f430d1dc6e6ab2e7be012234b4efb05cc9cfbd0566d216a06eb08" exitCode=0 Sep 29 17:05:33 crc kubenswrapper[4592]: I0929 17:05:33.830372 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" event={"ID":"8bf4afc4-5581-49d5-853c-8309dd3ea1bd","Type":"ContainerDied","Data":"80435acc225f430d1dc6e6ab2e7be012234b4efb05cc9cfbd0566d216a06eb08"} Sep 29 17:05:33 crc kubenswrapper[4592]: I0929 17:05:33.830396 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" event={"ID":"8bf4afc4-5581-49d5-853c-8309dd3ea1bd","Type":"ContainerStarted","Data":"9d809c7ac3e2c28a775f262de7807b53d58e52525fe5784793b0e20c96762f24"} Sep 29 17:05:34 crc kubenswrapper[4592]: I0929 17:05:34.837793 4592 generic.go:334] "Generic (PLEG): container finished" podID="8bf4afc4-5581-49d5-853c-8309dd3ea1bd" containerID="6776a31390148784b3b8e9b2fcaa97458a1f32f320e3953c3da3a2d93ff228d4" exitCode=0 Sep 29 17:05:34 crc kubenswrapper[4592]: I0929 17:05:34.837874 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" event={"ID":"8bf4afc4-5581-49d5-853c-8309dd3ea1bd","Type":"ContainerDied","Data":"6776a31390148784b3b8e9b2fcaa97458a1f32f320e3953c3da3a2d93ff228d4"} Sep 29 17:05:35 crc kubenswrapper[4592]: I0929 17:05:35.844552 4592 generic.go:334] "Generic (PLEG): container finished" podID="8bf4afc4-5581-49d5-853c-8309dd3ea1bd" containerID="bb1faad425506e6c21f9e712b79ae352578445ca092140abec4731d03f73c70b" exitCode=0 Sep 29 17:05:35 crc kubenswrapper[4592]: I0929 17:05:35.844613 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" event={"ID":"8bf4afc4-5581-49d5-853c-8309dd3ea1bd","Type":"ContainerDied","Data":"bb1faad425506e6c21f9e712b79ae352578445ca092140abec4731d03f73c70b"} Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.471514 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7bq9g"] Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.474511 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.479468 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7bq9g"] Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.650875 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce3ec7b-1d38-4037-b31f-d39125ff4157-utilities\") pod \"certified-operators-7bq9g\" (UID: \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\") " pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.651269 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce3ec7b-1d38-4037-b31f-d39125ff4157-catalog-content\") pod \"certified-operators-7bq9g\" (UID: \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\") " pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.651345 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtk76\" (UniqueName: \"kubernetes.io/projected/1ce3ec7b-1d38-4037-b31f-d39125ff4157-kube-api-access-jtk76\") pod \"certified-operators-7bq9g\" (UID: \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\") " pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.752061 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtk76\" (UniqueName: \"kubernetes.io/projected/1ce3ec7b-1d38-4037-b31f-d39125ff4157-kube-api-access-jtk76\") pod \"certified-operators-7bq9g\" (UID: \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\") " pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.752131 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce3ec7b-1d38-4037-b31f-d39125ff4157-utilities\") pod \"certified-operators-7bq9g\" (UID: \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\") " pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.752181 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce3ec7b-1d38-4037-b31f-d39125ff4157-catalog-content\") pod \"certified-operators-7bq9g\" (UID: \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\") " pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.752694 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce3ec7b-1d38-4037-b31f-d39125ff4157-utilities\") pod \"certified-operators-7bq9g\" (UID: \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\") " pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.752698 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce3ec7b-1d38-4037-b31f-d39125ff4157-catalog-content\") pod \"certified-operators-7bq9g\" (UID: \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\") " pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.787415 4592 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jtk76\" (UniqueName: \"kubernetes.io/projected/1ce3ec7b-1d38-4037-b31f-d39125ff4157-kube-api-access-jtk76\") pod \"certified-operators-7bq9g\" (UID: \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\") " pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:36 crc kubenswrapper[4592]: I0929 17:05:36.794994 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.203863 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.361709 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmvw4\" (UniqueName: \"kubernetes.io/projected/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-kube-api-access-dmvw4\") pod \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\" (UID: \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\") " Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.361777 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-bundle\") pod \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\" (UID: \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\") " Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.362023 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-util\") pod \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\" (UID: \"8bf4afc4-5581-49d5-853c-8309dd3ea1bd\") " Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.362482 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-bundle" (OuterVolumeSpecName: "bundle") pod "8bf4afc4-5581-49d5-853c-8309dd3ea1bd" (UID: "8bf4afc4-5581-49d5-853c-8309dd3ea1bd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.369470 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-kube-api-access-dmvw4" (OuterVolumeSpecName: "kube-api-access-dmvw4") pod "8bf4afc4-5581-49d5-853c-8309dd3ea1bd" (UID: "8bf4afc4-5581-49d5-853c-8309dd3ea1bd"). InnerVolumeSpecName "kube-api-access-dmvw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.373936 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7bq9g"] Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.377718 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-util" (OuterVolumeSpecName: "util") pod "8bf4afc4-5581-49d5-853c-8309dd3ea1bd" (UID: "8bf4afc4-5581-49d5-853c-8309dd3ea1bd"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.462936 4592 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-util\") on node \"crc\" DevicePath \"\"" Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.463248 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmvw4\" (UniqueName: \"kubernetes.io/projected/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-kube-api-access-dmvw4\") on node \"crc\" DevicePath \"\"" Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.463265 4592 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8bf4afc4-5581-49d5-853c-8309dd3ea1bd-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.857378 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.857373 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx" event={"ID":"8bf4afc4-5581-49d5-853c-8309dd3ea1bd","Type":"ContainerDied","Data":"9d809c7ac3e2c28a775f262de7807b53d58e52525fe5784793b0e20c96762f24"} Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.857563 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d809c7ac3e2c28a775f262de7807b53d58e52525fe5784793b0e20c96762f24" Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.859359 4592 generic.go:334] "Generic (PLEG): container finished" podID="1ce3ec7b-1d38-4037-b31f-d39125ff4157" containerID="9b9c7bd224a641a629e79df27e86f0ec30308ec7637fde35344ebb8372908ef0" exitCode=0 Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.859411 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bq9g" event={"ID":"1ce3ec7b-1d38-4037-b31f-d39125ff4157","Type":"ContainerDied","Data":"9b9c7bd224a641a629e79df27e86f0ec30308ec7637fde35344ebb8372908ef0"} Sep 29 17:05:37 crc kubenswrapper[4592]: I0929 17:05:37.859528 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bq9g" event={"ID":"1ce3ec7b-1d38-4037-b31f-d39125ff4157","Type":"ContainerStarted","Data":"c2815f224f387899ba8c34bcdb6e641af90b6e7897830297965c4f6953e22c6d"} Sep 29 17:05:38 crc kubenswrapper[4592]: I0929 17:05:38.871570 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bq9g" event={"ID":"1ce3ec7b-1d38-4037-b31f-d39125ff4157","Type":"ContainerStarted","Data":"da89d643a4191dfc90eee9dffed4400a1f6cc7c397785b8e888cc40c1049388e"} Sep 29 17:05:39 crc kubenswrapper[4592]: I0929 17:05:39.883765 4592 generic.go:334] "Generic (PLEG): container finished" podID="1ce3ec7b-1d38-4037-b31f-d39125ff4157" containerID="da89d643a4191dfc90eee9dffed4400a1f6cc7c397785b8e888cc40c1049388e" exitCode=0 Sep 29 17:05:39 crc kubenswrapper[4592]: I0929 17:05:39.884430 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bq9g" event={"ID":"1ce3ec7b-1d38-4037-b31f-d39125ff4157","Type":"ContainerDied","Data":"da89d643a4191dfc90eee9dffed4400a1f6cc7c397785b8e888cc40c1049388e"} Sep 29 17:05:40 crc kubenswrapper[4592]: I0929 17:05:40.894958 4592 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/certified-operators-7bq9g" event={"ID":"1ce3ec7b-1d38-4037-b31f-d39125ff4157","Type":"ContainerStarted","Data":"62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709"} Sep 29 17:05:40 crc kubenswrapper[4592]: I0929 17:05:40.916287 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7bq9g" podStartSLOduration=2.490811261 podStartE2EDuration="4.916266685s" podCreationTimestamp="2025-09-29 17:05:36 +0000 UTC" firstStartedPulling="2025-09-29 17:05:37.861174797 +0000 UTC m=+868.008952468" lastFinishedPulling="2025-09-29 17:05:40.286630211 +0000 UTC m=+870.434407892" observedRunningTime="2025-09-29 17:05:40.911068148 +0000 UTC m=+871.058845829" watchObservedRunningTime="2025-09-29 17:05:40.916266685 +0000 UTC m=+871.064044376" Sep 29 17:05:42 crc kubenswrapper[4592]: I0929 17:05:42.466317 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr"] Sep 29 17:05:42 crc kubenswrapper[4592]: E0929 17:05:42.466933 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf4afc4-5581-49d5-853c-8309dd3ea1bd" containerName="util" Sep 29 17:05:42 crc kubenswrapper[4592]: I0929 17:05:42.466952 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf4afc4-5581-49d5-853c-8309dd3ea1bd" containerName="util" Sep 29 17:05:42 crc kubenswrapper[4592]: E0929 17:05:42.466964 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf4afc4-5581-49d5-853c-8309dd3ea1bd" containerName="pull" Sep 29 17:05:42 crc kubenswrapper[4592]: I0929 17:05:42.466971 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf4afc4-5581-49d5-853c-8309dd3ea1bd" containerName="pull" Sep 29 17:05:42 crc kubenswrapper[4592]: E0929 17:05:42.466982 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bf4afc4-5581-49d5-853c-8309dd3ea1bd" containerName="extract" Sep 29 17:05:42 crc kubenswrapper[4592]: I0929 17:05:42.466992 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bf4afc4-5581-49d5-853c-8309dd3ea1bd" containerName="extract" Sep 29 17:05:42 crc kubenswrapper[4592]: I0929 17:05:42.467183 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bf4afc4-5581-49d5-853c-8309dd3ea1bd" containerName="extract" Sep 29 17:05:42 crc kubenswrapper[4592]: I0929 17:05:42.467898 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr" Sep 29 17:05:42 crc kubenswrapper[4592]: I0929 17:05:42.469596 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-lfnr7" Sep 29 17:05:42 crc kubenswrapper[4592]: I0929 17:05:42.498566 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr"] Sep 29 17:05:42 crc kubenswrapper[4592]: I0929 17:05:42.544311 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddmsc\" (UniqueName: \"kubernetes.io/projected/83cbe230-dcbc-4c90-befd-35f5082eaba6-kube-api-access-ddmsc\") pod \"openstack-operator-controller-operator-7bf7677558-jj6jr\" (UID: \"83cbe230-dcbc-4c90-befd-35f5082eaba6\") " pod="openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr" Sep 29 17:05:42 crc kubenswrapper[4592]: I0929 17:05:42.645320 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddmsc\" (UniqueName: \"kubernetes.io/projected/83cbe230-dcbc-4c90-befd-35f5082eaba6-kube-api-access-ddmsc\") pod \"openstack-operator-controller-operator-7bf7677558-jj6jr\" (UID: \"83cbe230-dcbc-4c90-befd-35f5082eaba6\") " pod="openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr" Sep 29 17:05:42 crc kubenswrapper[4592]: I0929 17:05:42.670970 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddmsc\" (UniqueName: \"kubernetes.io/projected/83cbe230-dcbc-4c90-befd-35f5082eaba6-kube-api-access-ddmsc\") pod \"openstack-operator-controller-operator-7bf7677558-jj6jr\" (UID: \"83cbe230-dcbc-4c90-befd-35f5082eaba6\") " pod="openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr" Sep 29 17:05:42 crc kubenswrapper[4592]: I0929 17:05:42.784047 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr" Sep 29 17:05:43 crc kubenswrapper[4592]: I0929 17:05:43.230572 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr"] Sep 29 17:05:43 crc kubenswrapper[4592]: I0929 17:05:43.918025 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr" event={"ID":"83cbe230-dcbc-4c90-befd-35f5082eaba6","Type":"ContainerStarted","Data":"be22850e50e4f7af04484f080f4f25bcf7166d30b345c4dd61ef348a8ba1e1b2"} Sep 29 17:05:46 crc kubenswrapper[4592]: I0929 17:05:46.795189 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:46 crc kubenswrapper[4592]: I0929 17:05:46.795524 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:46 crc kubenswrapper[4592]: I0929 17:05:46.838369 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:46 crc kubenswrapper[4592]: I0929 17:05:46.982346 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.254776 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7bq9g"] Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.255430 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7bq9g" podUID="1ce3ec7b-1d38-4037-b31f-d39125ff4157" containerName="registry-server" containerID="cri-o://62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709" gracePeriod=2 Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.792187 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.864593 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce3ec7b-1d38-4037-b31f-d39125ff4157-catalog-content\") pod \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\" (UID: \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\") " Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.864686 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtk76\" (UniqueName: \"kubernetes.io/projected/1ce3ec7b-1d38-4037-b31f-d39125ff4157-kube-api-access-jtk76\") pod \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\" (UID: \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\") " Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.864739 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce3ec7b-1d38-4037-b31f-d39125ff4157-utilities\") pod \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\" (UID: \"1ce3ec7b-1d38-4037-b31f-d39125ff4157\") " Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.865890 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ce3ec7b-1d38-4037-b31f-d39125ff4157-utilities" (OuterVolumeSpecName: "utilities") pod "1ce3ec7b-1d38-4037-b31f-d39125ff4157" (UID: "1ce3ec7b-1d38-4037-b31f-d39125ff4157"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.872256 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ce3ec7b-1d38-4037-b31f-d39125ff4157-kube-api-access-jtk76" (OuterVolumeSpecName: "kube-api-access-jtk76") pod "1ce3ec7b-1d38-4037-b31f-d39125ff4157" (UID: "1ce3ec7b-1d38-4037-b31f-d39125ff4157"). InnerVolumeSpecName "kube-api-access-jtk76". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.918525 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ce3ec7b-1d38-4037-b31f-d39125ff4157-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1ce3ec7b-1d38-4037-b31f-d39125ff4157" (UID: "1ce3ec7b-1d38-4037-b31f-d39125ff4157"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.956876 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr" event={"ID":"83cbe230-dcbc-4c90-befd-35f5082eaba6","Type":"ContainerStarted","Data":"b5f07a55f4ce5b4addcf27823f5796264134419e9cfec3d5aa043894f10f030f"} Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.958329 4592 generic.go:334] "Generic (PLEG): container finished" podID="1ce3ec7b-1d38-4037-b31f-d39125ff4157" containerID="62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709" exitCode=0 Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.958358 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bq9g" event={"ID":"1ce3ec7b-1d38-4037-b31f-d39125ff4157","Type":"ContainerDied","Data":"62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709"} Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.958375 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7bq9g" event={"ID":"1ce3ec7b-1d38-4037-b31f-d39125ff4157","Type":"ContainerDied","Data":"c2815f224f387899ba8c34bcdb6e641af90b6e7897830297965c4f6953e22c6d"} Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.958391 4592 scope.go:117] "RemoveContainer" containerID="62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709" Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.958496 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7bq9g" Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.970237 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce3ec7b-1d38-4037-b31f-d39125ff4157-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.970275 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtk76\" (UniqueName: \"kubernetes.io/projected/1ce3ec7b-1d38-4037-b31f-d39125ff4157-kube-api-access-jtk76\") on node \"crc\" DevicePath \"\"" Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.970287 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce3ec7b-1d38-4037-b31f-d39125ff4157-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:05:49 crc kubenswrapper[4592]: I0929 17:05:49.990328 4592 scope.go:117] "RemoveContainer" containerID="da89d643a4191dfc90eee9dffed4400a1f6cc7c397785b8e888cc40c1049388e" Sep 29 17:05:50 crc kubenswrapper[4592]: I0929 17:05:50.013113 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7bq9g"] Sep 29 17:05:50 crc kubenswrapper[4592]: I0929 17:05:50.016461 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7bq9g"] Sep 29 17:05:50 crc kubenswrapper[4592]: I0929 17:05:50.016717 4592 scope.go:117] "RemoveContainer" containerID="9b9c7bd224a641a629e79df27e86f0ec30308ec7637fde35344ebb8372908ef0" Sep 29 17:05:50 crc kubenswrapper[4592]: I0929 17:05:50.029701 4592 scope.go:117] "RemoveContainer" containerID="62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709" Sep 29 17:05:50 crc kubenswrapper[4592]: E0929 17:05:50.030109 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not 
find container \"62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709\": container with ID starting with 62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709 not found: ID does not exist" containerID="62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709" Sep 29 17:05:50 crc kubenswrapper[4592]: I0929 17:05:50.030136 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709"} err="failed to get container status \"62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709\": rpc error: code = NotFound desc = could not find container \"62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709\": container with ID starting with 62daf3c12401d0eea37aac90c225f3bdd97c2f17820720dcebabf290620b7709 not found: ID does not exist" Sep 29 17:05:50 crc kubenswrapper[4592]: I0929 17:05:50.030196 4592 scope.go:117] "RemoveContainer" containerID="da89d643a4191dfc90eee9dffed4400a1f6cc7c397785b8e888cc40c1049388e" Sep 29 17:05:50 crc kubenswrapper[4592]: E0929 17:05:50.030507 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da89d643a4191dfc90eee9dffed4400a1f6cc7c397785b8e888cc40c1049388e\": container with ID starting with da89d643a4191dfc90eee9dffed4400a1f6cc7c397785b8e888cc40c1049388e not found: ID does not exist" containerID="da89d643a4191dfc90eee9dffed4400a1f6cc7c397785b8e888cc40c1049388e" Sep 29 17:05:50 crc kubenswrapper[4592]: I0929 17:05:50.030531 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da89d643a4191dfc90eee9dffed4400a1f6cc7c397785b8e888cc40c1049388e"} err="failed to get container status \"da89d643a4191dfc90eee9dffed4400a1f6cc7c397785b8e888cc40c1049388e\": rpc error: code = NotFound desc = could not find container \"da89d643a4191dfc90eee9dffed4400a1f6cc7c397785b8e888cc40c1049388e\": container with ID starting with da89d643a4191dfc90eee9dffed4400a1f6cc7c397785b8e888cc40c1049388e not found: ID does not exist" Sep 29 17:05:50 crc kubenswrapper[4592]: I0929 17:05:50.030545 4592 scope.go:117] "RemoveContainer" containerID="9b9c7bd224a641a629e79df27e86f0ec30308ec7637fde35344ebb8372908ef0" Sep 29 17:05:50 crc kubenswrapper[4592]: E0929 17:05:50.030808 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b9c7bd224a641a629e79df27e86f0ec30308ec7637fde35344ebb8372908ef0\": container with ID starting with 9b9c7bd224a641a629e79df27e86f0ec30308ec7637fde35344ebb8372908ef0 not found: ID does not exist" containerID="9b9c7bd224a641a629e79df27e86f0ec30308ec7637fde35344ebb8372908ef0" Sep 29 17:05:50 crc kubenswrapper[4592]: I0929 17:05:50.030827 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b9c7bd224a641a629e79df27e86f0ec30308ec7637fde35344ebb8372908ef0"} err="failed to get container status \"9b9c7bd224a641a629e79df27e86f0ec30308ec7637fde35344ebb8372908ef0\": rpc error: code = NotFound desc = could not find container \"9b9c7bd224a641a629e79df27e86f0ec30308ec7637fde35344ebb8372908ef0\": container with ID starting with 9b9c7bd224a641a629e79df27e86f0ec30308ec7637fde35344ebb8372908ef0 not found: ID does not exist" Sep 29 17:05:51 crc kubenswrapper[4592]: I0929 17:05:51.194599 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ce3ec7b-1d38-4037-b31f-d39125ff4157" 
path="/var/lib/kubelet/pods/1ce3ec7b-1d38-4037-b31f-d39125ff4157/volumes" Sep 29 17:05:52 crc kubenswrapper[4592]: I0929 17:05:52.976813 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr" event={"ID":"83cbe230-dcbc-4c90-befd-35f5082eaba6","Type":"ContainerStarted","Data":"a8bf480775d5a2665c3cbd794f7367ee6d31cc629976d59e03b1af45174e215f"} Sep 29 17:05:52 crc kubenswrapper[4592]: I0929 17:05:52.977228 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr" Sep 29 17:05:53 crc kubenswrapper[4592]: I0929 17:05:53.011226 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr" podStartSLOduration=2.200638147 podStartE2EDuration="11.01120202s" podCreationTimestamp="2025-09-29 17:05:42 +0000 UTC" firstStartedPulling="2025-09-29 17:05:43.23980608 +0000 UTC m=+873.387583761" lastFinishedPulling="2025-09-29 17:05:52.050369933 +0000 UTC m=+882.198147634" observedRunningTime="2025-09-29 17:05:53.006138767 +0000 UTC m=+883.153916458" watchObservedRunningTime="2025-09-29 17:05:53.01120202 +0000 UTC m=+883.158979711" Sep 29 17:06:02 crc kubenswrapper[4592]: I0929 17:06:02.787045 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-7bf7677558-jj6jr" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.820481 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559"] Sep 29 17:06:17 crc kubenswrapper[4592]: E0929 17:06:17.822545 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ce3ec7b-1d38-4037-b31f-d39125ff4157" containerName="extract-utilities" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.822782 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ce3ec7b-1d38-4037-b31f-d39125ff4157" containerName="extract-utilities" Sep 29 17:06:17 crc kubenswrapper[4592]: E0929 17:06:17.822855 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ce3ec7b-1d38-4037-b31f-d39125ff4157" containerName="registry-server" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.822917 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ce3ec7b-1d38-4037-b31f-d39125ff4157" containerName="registry-server" Sep 29 17:06:17 crc kubenswrapper[4592]: E0929 17:06:17.822982 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ce3ec7b-1d38-4037-b31f-d39125ff4157" containerName="extract-content" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.823038 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ce3ec7b-1d38-4037-b31f-d39125ff4157" containerName="extract-content" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.823220 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ce3ec7b-1d38-4037-b31f-d39125ff4157" containerName="registry-server" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.823943 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk"] Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.824183 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.824970 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.827137 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-tvsg7" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.828487 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-s6z98" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.858818 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559"] Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.870124 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk"] Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.879600 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn"] Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.880716 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn" Sep 29 17:06:17 crc kubenswrapper[4592]: W0929 17:06:17.882613 4592 reflector.go:561] object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-s7wnc": failed to list *v1.Secret: secrets "designate-operator-controller-manager-dockercfg-s7wnc" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Sep 29 17:06:17 crc kubenswrapper[4592]: E0929 17:06:17.882719 4592 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"designate-operator-controller-manager-dockercfg-s7wnc\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"designate-operator-controller-manager-dockercfg-s7wnc\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.890245 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-k969c"] Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.891380 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k969c" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.894092 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-knrqn" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.908909 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-k969c"] Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.912790 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn"] Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.926215 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66"] Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.927536 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.929020 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-8prcl" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.933613 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slpmc\" (UniqueName: \"kubernetes.io/projected/de451eb0-13ae-4fab-a6f3-3cc8fb77566f-kube-api-access-slpmc\") pod \"cinder-operator-controller-manager-644bddb6d8-5p559\" (UID: \"de451eb0-13ae-4fab-a6f3-3cc8fb77566f\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.933655 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkxpl\" (UniqueName: \"kubernetes.io/projected/7ef58432-073e-43a5-bc36-38cb3611b118-kube-api-access-dkxpl\") pod \"barbican-operator-controller-manager-6ff8b75857-55xvk\" (UID: \"7ef58432-073e-43a5-bc36-38cb3611b118\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.933675 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpn7s\" (UniqueName: \"kubernetes.io/projected/ba256bd8-c14c-458e-b919-2feedb3a0c46-kube-api-access-zpn7s\") pod \"designate-operator-controller-manager-84f4f7b77b-bj7fn\" (UID: \"ba256bd8-c14c-458e-b919-2feedb3a0c46\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.941414 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56"] Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.942303 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.944262 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-kpz49" Sep 29 17:06:17 crc kubenswrapper[4592]: I0929 17:06:17.979204 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.002279 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.014018 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.015328 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.020802 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-slxrh" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.021018 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.037194 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48nsg\" (UniqueName: \"kubernetes.io/projected/9c565c72-206a-42a7-943d-c55fd9065e5f-kube-api-access-48nsg\") pod \"heat-operator-controller-manager-5d889d78cf-f8v56\" (UID: \"9c565c72-206a-42a7-943d-c55fd9065e5f\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.037251 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd6wl\" (UniqueName: \"kubernetes.io/projected/f8504fb5-9c3b-4b51-bf22-31c6bcdacad4-kube-api-access-wd6wl\") pod \"glance-operator-controller-manager-84958c4d49-k969c\" (UID: \"f8504fb5-9c3b-4b51-bf22-31c6bcdacad4\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k969c" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.037283 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-996c2\" (UniqueName: \"kubernetes.io/projected/1d74dab2-fe04-4218-8b91-4b958b0ad39d-kube-api-access-996c2\") pod \"horizon-operator-controller-manager-9f4696d94-p4r66\" (UID: \"1d74dab2-fe04-4218-8b91-4b958b0ad39d\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.037368 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slpmc\" (UniqueName: \"kubernetes.io/projected/de451eb0-13ae-4fab-a6f3-3cc8fb77566f-kube-api-access-slpmc\") pod \"cinder-operator-controller-manager-644bddb6d8-5p559\" (UID: \"de451eb0-13ae-4fab-a6f3-3cc8fb77566f\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.037401 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-dkxpl\" (UniqueName: \"kubernetes.io/projected/7ef58432-073e-43a5-bc36-38cb3611b118-kube-api-access-dkxpl\") pod \"barbican-operator-controller-manager-6ff8b75857-55xvk\" (UID: \"7ef58432-073e-43a5-bc36-38cb3611b118\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.037423 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpn7s\" (UniqueName: \"kubernetes.io/projected/ba256bd8-c14c-458e-b919-2feedb3a0c46-kube-api-access-zpn7s\") pod \"designate-operator-controller-manager-84f4f7b77b-bj7fn\" (UID: \"ba256bd8-c14c-458e-b919-2feedb3a0c46\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.065276 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.066253 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.080260 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-xz2px" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.098055 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.114056 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slpmc\" (UniqueName: \"kubernetes.io/projected/de451eb0-13ae-4fab-a6f3-3cc8fb77566f-kube-api-access-slpmc\") pod \"cinder-operator-controller-manager-644bddb6d8-5p559\" (UID: \"de451eb0-13ae-4fab-a6f3-3cc8fb77566f\") " pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.115273 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkxpl\" (UniqueName: \"kubernetes.io/projected/7ef58432-073e-43a5-bc36-38cb3611b118-kube-api-access-dkxpl\") pod \"barbican-operator-controller-manager-6ff8b75857-55xvk\" (UID: \"7ef58432-073e-43a5-bc36-38cb3611b118\") " pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.137225 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.139319 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48nsg\" (UniqueName: \"kubernetes.io/projected/9c565c72-206a-42a7-943d-c55fd9065e5f-kube-api-access-48nsg\") pod \"heat-operator-controller-manager-5d889d78cf-f8v56\" (UID: \"9c565c72-206a-42a7-943d-c55fd9065e5f\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.139366 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n52dj\" (UniqueName: \"kubernetes.io/projected/a4b81165-b69a-40fa-b875-6d138351d6e6-kube-api-access-n52dj\") pod \"ironic-operator-controller-manager-7975b88857-kjsck\" (UID: \"a4b81165-b69a-40fa-b875-6d138351d6e6\") " 
pod="openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.139387 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd6wl\" (UniqueName: \"kubernetes.io/projected/f8504fb5-9c3b-4b51-bf22-31c6bcdacad4-kube-api-access-wd6wl\") pod \"glance-operator-controller-manager-84958c4d49-k969c\" (UID: \"f8504fb5-9c3b-4b51-bf22-31c6bcdacad4\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k969c" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.139408 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv6nn\" (UniqueName: \"kubernetes.io/projected/a4e61264-26ad-4012-be6c-4d6596b4ab27-kube-api-access-nv6nn\") pod \"infra-operator-controller-manager-7d857cc749-b8fbf\" (UID: \"a4e61264-26ad-4012-be6c-4d6596b4ab27\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.139433 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-996c2\" (UniqueName: \"kubernetes.io/projected/1d74dab2-fe04-4218-8b91-4b958b0ad39d-kube-api-access-996c2\") pod \"horizon-operator-controller-manager-9f4696d94-p4r66\" (UID: \"1d74dab2-fe04-4218-8b91-4b958b0ad39d\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.139471 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a4e61264-26ad-4012-be6c-4d6596b4ab27-cert\") pod \"infra-operator-controller-manager-7d857cc749-b8fbf\" (UID: \"a4e61264-26ad-4012-be6c-4d6596b4ab27\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.147612 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpn7s\" (UniqueName: \"kubernetes.io/projected/ba256bd8-c14c-458e-b919-2feedb3a0c46-kube-api-access-zpn7s\") pod \"designate-operator-controller-manager-84f4f7b77b-bj7fn\" (UID: \"ba256bd8-c14c-458e-b919-2feedb3a0c46\") " pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.162530 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.176566 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.187973 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.189404 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.203618 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-vv7mw" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.204652 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd6wl\" (UniqueName: \"kubernetes.io/projected/f8504fb5-9c3b-4b51-bf22-31c6bcdacad4-kube-api-access-wd6wl\") pod \"glance-operator-controller-manager-84958c4d49-k969c\" (UID: \"f8504fb5-9c3b-4b51-bf22-31c6bcdacad4\") " pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k969c" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.213396 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.214754 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.217114 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-996c2\" (UniqueName: \"kubernetes.io/projected/1d74dab2-fe04-4218-8b91-4b958b0ad39d-kube-api-access-996c2\") pod \"horizon-operator-controller-manager-9f4696d94-p4r66\" (UID: \"1d74dab2-fe04-4218-8b91-4b958b0ad39d\") " pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.222026 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-2xg2s" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.222420 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k969c" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.225051 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48nsg\" (UniqueName: \"kubernetes.io/projected/9c565c72-206a-42a7-943d-c55fd9065e5f-kube-api-access-48nsg\") pod \"heat-operator-controller-manager-5d889d78cf-f8v56\" (UID: \"9c565c72-206a-42a7-943d-c55fd9065e5f\") " pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.233302 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.241481 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a4e61264-26ad-4012-be6c-4d6596b4ab27-cert\") pod \"infra-operator-controller-manager-7d857cc749-b8fbf\" (UID: \"a4e61264-26ad-4012-be6c-4d6596b4ab27\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.241604 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n52dj\" (UniqueName: \"kubernetes.io/projected/a4b81165-b69a-40fa-b875-6d138351d6e6-kube-api-access-n52dj\") pod \"ironic-operator-controller-manager-7975b88857-kjsck\" (UID: \"a4b81165-b69a-40fa-b875-6d138351d6e6\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.241656 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b426w\" (UniqueName: \"kubernetes.io/projected/bfa2f914-2596-49e6-bb75-760663a69813-kube-api-access-b426w\") pod \"manila-operator-controller-manager-6d68dbc695-cffzv\" (UID: \"bfa2f914-2596-49e6-bb75-760663a69813\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.241685 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv6nn\" (UniqueName: \"kubernetes.io/projected/a4e61264-26ad-4012-be6c-4d6596b4ab27-kube-api-access-nv6nn\") pod \"infra-operator-controller-manager-7d857cc749-b8fbf\" (UID: \"a4e61264-26ad-4012-be6c-4d6596b4ab27\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" Sep 29 17:06:18 crc kubenswrapper[4592]: E0929 17:06:18.242084 4592 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 29 17:06:18 crc kubenswrapper[4592]: E0929 17:06:18.242158 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a4e61264-26ad-4012-be6c-4d6596b4ab27-cert podName:a4e61264-26ad-4012-be6c-4d6596b4ab27 nodeName:}" failed. No retries permitted until 2025-09-29 17:06:18.742122509 +0000 UTC m=+908.889900200 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a4e61264-26ad-4012-be6c-4d6596b4ab27-cert") pod "infra-operator-controller-manager-7d857cc749-b8fbf" (UID: "a4e61264-26ad-4012-be6c-4d6596b4ab27") : secret "infra-operator-webhook-server-cert" not found Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.253046 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.268509 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.270765 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.304083 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv6nn\" (UniqueName: \"kubernetes.io/projected/a4e61264-26ad-4012-be6c-4d6596b4ab27-kube-api-access-nv6nn\") pod \"infra-operator-controller-manager-7d857cc749-b8fbf\" (UID: \"a4e61264-26ad-4012-be6c-4d6596b4ab27\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.304555 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n52dj\" (UniqueName: \"kubernetes.io/projected/a4b81165-b69a-40fa-b875-6d138351d6e6-kube-api-access-n52dj\") pod \"ironic-operator-controller-manager-7975b88857-kjsck\" (UID: \"a4b81165-b69a-40fa-b875-6d138351d6e6\") " pod="openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.311701 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.312711 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.319339 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-kcjmd" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.320086 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.337735 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.351795 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-pvh9x" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.371880 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.373205 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srwp7\" (UniqueName: \"kubernetes.io/projected/dff5de8b-2910-4e5a-a80a-089c649039cd-kube-api-access-srwp7\") pod \"keystone-operator-controller-manager-5bd55b4bff-9n2d2\" (UID: \"dff5de8b-2910-4e5a-a80a-089c649039cd\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.373309 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b426w\" (UniqueName: \"kubernetes.io/projected/bfa2f914-2596-49e6-bb75-760663a69813-kube-api-access-b426w\") pod \"manila-operator-controller-manager-6d68dbc695-cffzv\" (UID: \"bfa2f914-2596-49e6-bb75-760663a69813\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.380268 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.442725 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.442834 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.449364 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-dkqn9" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.457009 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.461508 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b426w\" (UniqueName: \"kubernetes.io/projected/bfa2f914-2596-49e6-bb75-760663a69813-kube-api-access-b426w\") pod \"manila-operator-controller-manager-6d68dbc695-cffzv\" (UID: \"bfa2f914-2596-49e6-bb75-760663a69813\") " pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.481432 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.485511 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.488069 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.495239 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kfrg\" (UniqueName: \"kubernetes.io/projected/fd9f041b-9fd6-4d50-bc82-35fd86eea539-kube-api-access-6kfrg\") pod \"neutron-operator-controller-manager-64d7b59854-7sgxz\" (UID: \"fd9f041b-9fd6-4d50-bc82-35fd86eea539\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.495341 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srwp7\" (UniqueName: \"kubernetes.io/projected/dff5de8b-2910-4e5a-a80a-089c649039cd-kube-api-access-srwp7\") pod \"keystone-operator-controller-manager-5bd55b4bff-9n2d2\" (UID: \"dff5de8b-2910-4e5a-a80a-089c649039cd\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.495368 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nnsm\" (UniqueName: \"kubernetes.io/projected/6bf183ea-90d6-4aff-9e61-d4cc3692fe08-kube-api-access-8nnsm\") pod \"mariadb-operator-controller-manager-88c7-ssmqx\" (UID: \"6bf183ea-90d6-4aff-9e61-d4cc3692fe08\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.524739 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-qtsmn" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.527186 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.540020 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srwp7\" (UniqueName: \"kubernetes.io/projected/dff5de8b-2910-4e5a-a80a-089c649039cd-kube-api-access-srwp7\") pod \"keystone-operator-controller-manager-5bd55b4bff-9n2d2\" (UID: \"dff5de8b-2910-4e5a-a80a-089c649039cd\") " pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.563577 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.564857 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.573163 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-7fw4f" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.573336 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.608572 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nnsm\" (UniqueName: \"kubernetes.io/projected/6bf183ea-90d6-4aff-9e61-d4cc3692fe08-kube-api-access-8nnsm\") pod \"mariadb-operator-controller-manager-88c7-ssmqx\" (UID: \"6bf183ea-90d6-4aff-9e61-d4cc3692fe08\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.608634 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szncj\" (UniqueName: \"kubernetes.io/projected/5746404b-3a0f-4851-9de9-28e4e7ef8f1f-kube-api-access-szncj\") pod \"nova-operator-controller-manager-c7c776c96-hg6b2\" (UID: \"5746404b-3a0f-4851-9de9-28e4e7ef8f1f\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.608681 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r57z5\" (UniqueName: \"kubernetes.io/projected/87bb1f2b-bc93-4b10-aa27-b8efd9ba669a-kube-api-access-r57z5\") pod \"octavia-operator-controller-manager-76fcc6dc7c-fc5kq\" (UID: \"87bb1f2b-bc93-4b10-aa27-b8efd9ba669a\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.608705 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kfrg\" (UniqueName: \"kubernetes.io/projected/fd9f041b-9fd6-4d50-bc82-35fd86eea539-kube-api-access-6kfrg\") pod \"neutron-operator-controller-manager-64d7b59854-7sgxz\" (UID: \"fd9f041b-9fd6-4d50-bc82-35fd86eea539\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.609251 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.615121 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.631480 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.632519 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.641573 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-jbvwh" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.652411 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nnsm\" (UniqueName: \"kubernetes.io/projected/6bf183ea-90d6-4aff-9e61-d4cc3692fe08-kube-api-access-8nnsm\") pod \"mariadb-operator-controller-manager-88c7-ssmqx\" (UID: \"6bf183ea-90d6-4aff-9e61-d4cc3692fe08\") " pod="openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.668489 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.677118 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.680899 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.689056 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.691093 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.703580 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-664kx" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.706119 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.709435 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szncj\" (UniqueName: \"kubernetes.io/projected/5746404b-3a0f-4851-9de9-28e4e7ef8f1f-kube-api-access-szncj\") pod \"nova-operator-controller-manager-c7c776c96-hg6b2\" (UID: \"5746404b-3a0f-4851-9de9-28e4e7ef8f1f\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.709477 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/965c0641-f6e8-44e3-a8a1-32028665b9e2-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-fqfbf\" (UID: \"965c0641-f6e8-44e3-a8a1-32028665b9e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.709513 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkpgn\" (UniqueName: \"kubernetes.io/projected/965c0641-f6e8-44e3-a8a1-32028665b9e2-kube-api-access-xkpgn\") pod \"openstack-baremetal-operator-controller-manager-6d776955-fqfbf\" (UID: \"965c0641-f6e8-44e3-a8a1-32028665b9e2\") " 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.709563 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r57z5\" (UniqueName: \"kubernetes.io/projected/87bb1f2b-bc93-4b10-aa27-b8efd9ba669a-kube-api-access-r57z5\") pod \"octavia-operator-controller-manager-76fcc6dc7c-fc5kq\" (UID: \"87bb1f2b-bc93-4b10-aa27-b8efd9ba669a\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.709590 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l29ww\" (UniqueName: \"kubernetes.io/projected/bb38bf88-f05d-4e0e-8923-66b2097e247c-kube-api-access-l29ww\") pod \"ovn-operator-controller-manager-9976ff44c-l97v2\" (UID: \"bb38bf88-f05d-4e0e-8923-66b2097e247c\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.710082 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.725837 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-sjcp8" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.743095 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szncj\" (UniqueName: \"kubernetes.io/projected/5746404b-3a0f-4851-9de9-28e4e7ef8f1f-kube-api-access-szncj\") pod \"nova-operator-controller-manager-c7c776c96-hg6b2\" (UID: \"5746404b-3a0f-4851-9de9-28e4e7ef8f1f\") " pod="openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.761213 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r57z5\" (UniqueName: \"kubernetes.io/projected/87bb1f2b-bc93-4b10-aa27-b8efd9ba669a-kube-api-access-r57z5\") pod \"octavia-operator-controller-manager-76fcc6dc7c-fc5kq\" (UID: \"87bb1f2b-bc93-4b10-aa27-b8efd9ba669a\") " pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.765906 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kfrg\" (UniqueName: \"kubernetes.io/projected/fd9f041b-9fd6-4d50-bc82-35fd86eea539-kube-api-access-6kfrg\") pod \"neutron-operator-controller-manager-64d7b59854-7sgxz\" (UID: \"fd9f041b-9fd6-4d50-bc82-35fd86eea539\") " pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.774457 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.802996 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.808559 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.814881 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/965c0641-f6e8-44e3-a8a1-32028665b9e2-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-fqfbf\" (UID: \"965c0641-f6e8-44e3-a8a1-32028665b9e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.814939 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkpgn\" (UniqueName: \"kubernetes.io/projected/965c0641-f6e8-44e3-a8a1-32028665b9e2-kube-api-access-xkpgn\") pod \"openstack-baremetal-operator-controller-manager-6d776955-fqfbf\" (UID: \"965c0641-f6e8-44e3-a8a1-32028665b9e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.815000 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l29ww\" (UniqueName: \"kubernetes.io/projected/bb38bf88-f05d-4e0e-8923-66b2097e247c-kube-api-access-l29ww\") pod \"ovn-operator-controller-manager-9976ff44c-l97v2\" (UID: \"bb38bf88-f05d-4e0e-8923-66b2097e247c\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.815045 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nplpz\" (UniqueName: \"kubernetes.io/projected/9974276a-24ba-4ca1-9c70-f85e17e9c10c-kube-api-access-nplpz\") pod \"swift-operator-controller-manager-bc7dc7bd9-pfzkm\" (UID: \"9974276a-24ba-4ca1-9c70-f85e17e9c10c\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.815091 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a4e61264-26ad-4012-be6c-4d6596b4ab27-cert\") pod \"infra-operator-controller-manager-7d857cc749-b8fbf\" (UID: \"a4e61264-26ad-4012-be6c-4d6596b4ab27\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.815162 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kx6d\" (UniqueName: \"kubernetes.io/projected/1100f7ed-81d3-49d8-9852-867de93e273b-kube-api-access-6kx6d\") pod \"placement-operator-controller-manager-589c58c6c-j5st9\" (UID: \"1100f7ed-81d3-49d8-9852-867de93e273b\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9" Sep 29 17:06:18 crc kubenswrapper[4592]: E0929 17:06:18.815370 4592 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 17:06:18 crc kubenswrapper[4592]: E0929 17:06:18.815413 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/965c0641-f6e8-44e3-a8a1-32028665b9e2-cert podName:965c0641-f6e8-44e3-a8a1-32028665b9e2 nodeName:}" failed. No retries permitted until 2025-09-29 17:06:19.315400544 +0000 UTC m=+909.463178225 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/965c0641-f6e8-44e3-a8a1-32028665b9e2-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-fqfbf" (UID: "965c0641-f6e8-44e3-a8a1-32028665b9e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 17:06:18 crc kubenswrapper[4592]: E0929 17:06:18.815912 4592 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Sep 29 17:06:18 crc kubenswrapper[4592]: E0929 17:06:18.815945 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a4e61264-26ad-4012-be6c-4d6596b4ab27-cert podName:a4e61264-26ad-4012-be6c-4d6596b4ab27 nodeName:}" failed. No retries permitted until 2025-09-29 17:06:19.815933629 +0000 UTC m=+909.963711320 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/a4e61264-26ad-4012-be6c-4d6596b4ab27-cert") pod "infra-operator-controller-manager-7d857cc749-b8fbf" (UID: "a4e61264-26ad-4012-be6c-4d6596b4ab27") : secret "infra-operator-webhook-server-cert" not found Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.816206 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-5qrnr" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.824021 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.838119 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.839846 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.850548 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-s7wnc" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.854227 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.863596 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.904241 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-mxl29"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.905718 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.911903 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkpgn\" (UniqueName: \"kubernetes.io/projected/965c0641-f6e8-44e3-a8a1-32028665b9e2-kube-api-access-xkpgn\") pod \"openstack-baremetal-operator-controller-manager-6d776955-fqfbf\" (UID: \"965c0641-f6e8-44e3-a8a1-32028665b9e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.916450 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nplpz\" (UniqueName: \"kubernetes.io/projected/9974276a-24ba-4ca1-9c70-f85e17e9c10c-kube-api-access-nplpz\") pod \"swift-operator-controller-manager-bc7dc7bd9-pfzkm\" (UID: \"9974276a-24ba-4ca1-9c70-f85e17e9c10c\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.916547 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t6rl\" (UniqueName: \"kubernetes.io/projected/5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4-kube-api-access-7t6rl\") pod \"telemetry-operator-controller-manager-b8d54b5d7-77pht\" (UID: \"5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.916580 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kx6d\" (UniqueName: \"kubernetes.io/projected/1100f7ed-81d3-49d8-9852-867de93e273b-kube-api-access-6kx6d\") pod \"placement-operator-controller-manager-589c58c6c-j5st9\" (UID: \"1100f7ed-81d3-49d8-9852-867de93e273b\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.918819 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-b4sph" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.922824 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l29ww\" (UniqueName: \"kubernetes.io/projected/bb38bf88-f05d-4e0e-8923-66b2097e247c-kube-api-access-l29ww\") pod \"ovn-operator-controller-manager-9976ff44c-l97v2\" (UID: \"bb38bf88-f05d-4e0e-8923-66b2097e247c\") " pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.924386 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-mxl29"] Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.965592 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2" Sep 29 17:06:18 crc kubenswrapper[4592]: I0929 17:06:18.972819 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kx6d\" (UniqueName: \"kubernetes.io/projected/1100f7ed-81d3-49d8-9852-867de93e273b-kube-api-access-6kx6d\") pod \"placement-operator-controller-manager-589c58c6c-j5st9\" (UID: \"1100f7ed-81d3-49d8-9852-867de93e273b\") " pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:18.996303 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nplpz\" (UniqueName: \"kubernetes.io/projected/9974276a-24ba-4ca1-9c70-f85e17e9c10c-kube-api-access-nplpz\") pod \"swift-operator-controller-manager-bc7dc7bd9-pfzkm\" (UID: \"9974276a-24ba-4ca1-9c70-f85e17e9c10c\") " pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.020723 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbgct\" (UniqueName: \"kubernetes.io/projected/33a1eea4-82d2-438a-a844-6539c3016172-kube-api-access-fbgct\") pod \"test-operator-controller-manager-f66b554c6-mxl29\" (UID: \"33a1eea4-82d2-438a-a844-6539c3016172\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.020887 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t6rl\" (UniqueName: \"kubernetes.io/projected/5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4-kube-api-access-7t6rl\") pod \"telemetry-operator-controller-manager-b8d54b5d7-77pht\" (UID: \"5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.021291 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl"] Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.022670 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.029165 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.039318 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-bmsdw" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.045950 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.054822 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t6rl\" (UniqueName: \"kubernetes.io/projected/5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4-kube-api-access-7t6rl\") pod \"telemetry-operator-controller-manager-b8d54b5d7-77pht\" (UID: \"5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4\") " pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.061426 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.077517 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl"] Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.121790 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88ls2\" (UniqueName: \"kubernetes.io/projected/50b8b2f8-551f-4379-84b6-5b217fa8b50c-kube-api-access-88ls2\") pod \"watcher-operator-controller-manager-76669f99c-mqwzl\" (UID: \"50b8b2f8-551f-4379-84b6-5b217fa8b50c\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.122200 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbgct\" (UniqueName: \"kubernetes.io/projected/33a1eea4-82d2-438a-a844-6539c3016172-kube-api-access-fbgct\") pod \"test-operator-controller-manager-f66b554c6-mxl29\" (UID: \"33a1eea4-82d2-438a-a844-6539c3016172\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.147300 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n"] Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.148447 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.157492 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.167619 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-zmzxm" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.171021 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.197276 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbgct\" (UniqueName: \"kubernetes.io/projected/33a1eea4-82d2-438a-a844-6539c3016172-kube-api-access-fbgct\") pod \"test-operator-controller-manager-f66b554c6-mxl29\" (UID: \"33a1eea4-82d2-438a-a844-6539c3016172\") " pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.237271 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88ls2\" (UniqueName: \"kubernetes.io/projected/50b8b2f8-551f-4379-84b6-5b217fa8b50c-kube-api-access-88ls2\") pod \"watcher-operator-controller-manager-76669f99c-mqwzl\" (UID: \"50b8b2f8-551f-4379-84b6-5b217fa8b50c\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.266372 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88ls2\" (UniqueName: \"kubernetes.io/projected/50b8b2f8-551f-4379-84b6-5b217fa8b50c-kube-api-access-88ls2\") pod \"watcher-operator-controller-manager-76669f99c-mqwzl\" (UID: \"50b8b2f8-551f-4379-84b6-5b217fa8b50c\") " pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.328685 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.347930 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8da8ce1f-60e7-4381-975e-daf9c5225b10-cert\") pod \"openstack-operator-controller-manager-7b9c4c58f5-fdd5n\" (UID: \"8da8ce1f-60e7-4381-975e-daf9c5225b10\") " pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.348024 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/965c0641-f6e8-44e3-a8a1-32028665b9e2-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-fqfbf\" (UID: \"965c0641-f6e8-44e3-a8a1-32028665b9e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.348094 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxfvz\" (UniqueName: \"kubernetes.io/projected/8da8ce1f-60e7-4381-975e-daf9c5225b10-kube-api-access-dxfvz\") pod \"openstack-operator-controller-manager-7b9c4c58f5-fdd5n\" (UID: \"8da8ce1f-60e7-4381-975e-daf9c5225b10\") " pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" Sep 29 17:06:19 crc kubenswrapper[4592]: E0929 17:06:19.348532 4592 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 17:06:19 crc kubenswrapper[4592]: E0929 17:06:19.348597 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/965c0641-f6e8-44e3-a8a1-32028665b9e2-cert podName:965c0641-f6e8-44e3-a8a1-32028665b9e2 nodeName:}" failed. 
No retries permitted until 2025-09-29 17:06:20.348569694 +0000 UTC m=+910.496347375 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/965c0641-f6e8-44e3-a8a1-32028665b9e2-cert") pod "openstack-baremetal-operator-controller-manager-6d776955-fqfbf" (UID: "965c0641-f6e8-44e3-a8a1-32028665b9e2") : secret "openstack-baremetal-operator-webhook-server-cert" not found Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.356428 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n"] Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.356461 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb"] Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.361850 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.362193 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.368067 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-rzwkj" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.396040 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb"] Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.449322 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56"] Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.449953 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8da8ce1f-60e7-4381-975e-daf9c5225b10-cert\") pod \"openstack-operator-controller-manager-7b9c4c58f5-fdd5n\" (UID: \"8da8ce1f-60e7-4381-975e-daf9c5225b10\") " pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.450000 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxfvz\" (UniqueName: \"kubernetes.io/projected/8da8ce1f-60e7-4381-975e-daf9c5225b10-kube-api-access-dxfvz\") pod \"openstack-operator-controller-manager-7b9c4c58f5-fdd5n\" (UID: \"8da8ce1f-60e7-4381-975e-daf9c5225b10\") " pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" Sep 29 17:06:19 crc kubenswrapper[4592]: E0929 17:06:19.450248 4592 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 29 17:06:19 crc kubenswrapper[4592]: E0929 17:06:19.450639 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8da8ce1f-60e7-4381-975e-daf9c5225b10-cert podName:8da8ce1f-60e7-4381-975e-daf9c5225b10 nodeName:}" failed. No retries permitted until 2025-09-29 17:06:19.950327169 +0000 UTC m=+910.098104840 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8da8ce1f-60e7-4381-975e-daf9c5225b10-cert") pod "openstack-operator-controller-manager-7b9c4c58f5-fdd5n" (UID: "8da8ce1f-60e7-4381-975e-daf9c5225b10") : secret "webhook-server-cert" not found Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.491907 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxfvz\" (UniqueName: \"kubernetes.io/projected/8da8ce1f-60e7-4381-975e-daf9c5225b10-kube-api-access-dxfvz\") pod \"openstack-operator-controller-manager-7b9c4c58f5-fdd5n\" (UID: \"8da8ce1f-60e7-4381-975e-daf9c5225b10\") " pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.552405 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86mp2\" (UniqueName: \"kubernetes.io/projected/a3543654-318a-48ed-8109-a76d758b231d-kube-api-access-86mp2\") pod \"rabbitmq-cluster-operator-manager-79d8469568-2pcjb\" (UID: \"a3543654-318a-48ed-8109-a76d758b231d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.565529 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66"] Sep 29 17:06:19 crc kubenswrapper[4592]: W0929 17:06:19.568305 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d74dab2_fe04_4218_8b91_4b958b0ad39d.slice/crio-fb449b0f71be49a96361ed97372fa731968a3393dd79a1a4b73f2160dfed5dfd WatchSource:0}: Error finding container fb449b0f71be49a96361ed97372fa731968a3393dd79a1a4b73f2160dfed5dfd: Status 404 returned error can't find the container with id fb449b0f71be49a96361ed97372fa731968a3393dd79a1a4b73f2160dfed5dfd Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.655653 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86mp2\" (UniqueName: \"kubernetes.io/projected/a3543654-318a-48ed-8109-a76d758b231d-kube-api-access-86mp2\") pod \"rabbitmq-cluster-operator-manager-79d8469568-2pcjb\" (UID: \"a3543654-318a-48ed-8109-a76d758b231d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.659742 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559"] Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.699192 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86mp2\" (UniqueName: \"kubernetes.io/projected/a3543654-318a-48ed-8109-a76d758b231d-kube-api-access-86mp2\") pod \"rabbitmq-cluster-operator-manager-79d8469568-2pcjb\" (UID: \"a3543654-318a-48ed-8109-a76d758b231d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.724951 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.839950 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk"] Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.861062 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a4e61264-26ad-4012-be6c-4d6596b4ab27-cert\") pod \"infra-operator-controller-manager-7d857cc749-b8fbf\" (UID: \"a4e61264-26ad-4012-be6c-4d6596b4ab27\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.874576 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a4e61264-26ad-4012-be6c-4d6596b4ab27-cert\") pod \"infra-operator-controller-manager-7d857cc749-b8fbf\" (UID: \"a4e61264-26ad-4012-be6c-4d6596b4ab27\") " pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.965417 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8da8ce1f-60e7-4381-975e-daf9c5225b10-cert\") pod \"openstack-operator-controller-manager-7b9c4c58f5-fdd5n\" (UID: \"8da8ce1f-60e7-4381-975e-daf9c5225b10\") " pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" Sep 29 17:06:19 crc kubenswrapper[4592]: E0929 17:06:19.966681 4592 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Sep 29 17:06:19 crc kubenswrapper[4592]: E0929 17:06:19.966772 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8da8ce1f-60e7-4381-975e-daf9c5225b10-cert podName:8da8ce1f-60e7-4381-975e-daf9c5225b10 nodeName:}" failed. No retries permitted until 2025-09-29 17:06:20.966750397 +0000 UTC m=+911.114528078 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8da8ce1f-60e7-4381-975e-daf9c5225b10-cert") pod "openstack-operator-controller-manager-7b9c4c58f5-fdd5n" (UID: "8da8ce1f-60e7-4381-975e-daf9c5225b10") : secret "webhook-server-cert" not found Sep 29 17:06:19 crc kubenswrapper[4592]: I0929 17:06:19.975561 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-84958c4d49-k969c"] Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.130411 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.229861 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck"] Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.234282 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx"] Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.245218 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv"] Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.252861 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56" event={"ID":"9c565c72-206a-42a7-943d-c55fd9065e5f","Type":"ContainerStarted","Data":"2b58aeb45cf5146cb6743bed744014ba9d09d0b95382b754c08efbb6107a4eda"} Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.253121 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2"] Sep 29 17:06:20 crc kubenswrapper[4592]: W0929 17:06:20.256248 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4b81165_b69a_40fa_b875_6d138351d6e6.slice/crio-c140e174b5520f50e588d85066a61b592751a5a44d9832dc4814969645218619 WatchSource:0}: Error finding container c140e174b5520f50e588d85066a61b592751a5a44d9832dc4814969645218619: Status 404 returned error can't find the container with id c140e174b5520f50e588d85066a61b592751a5a44d9832dc4814969645218619 Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.256672 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66" event={"ID":"1d74dab2-fe04-4218-8b91-4b958b0ad39d","Type":"ContainerStarted","Data":"fb449b0f71be49a96361ed97372fa731968a3393dd79a1a4b73f2160dfed5dfd"} Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.259641 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k969c" event={"ID":"f8504fb5-9c3b-4b51-bf22-31c6bcdacad4","Type":"ContainerStarted","Data":"8e1664844fa17a24172384ed0189cc15d1a32c2f933fa6b72e8ffbb30aca5a25"} Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.260773 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk" event={"ID":"7ef58432-073e-43a5-bc36-38cb3611b118","Type":"ContainerStarted","Data":"00511dea527533d64bd54174ea8a1351479aa4f10efa48c8faf59d26d6787d97"} Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.261596 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559" event={"ID":"de451eb0-13ae-4fab-a6f3-3cc8fb77566f","Type":"ContainerStarted","Data":"c28a7b7f004be37a19201694a30d5d4a1a57cf33ad7d7c9a5d2710a96e22970d"} Sep 29 17:06:20 crc kubenswrapper[4592]: W0929 17:06:20.283844 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbfa2f914_2596_49e6_bb75_760663a69813.slice/crio-bed370067659122ea4dfed272b6ae369283cce7f6c2f62972dea7d71707af094 WatchSource:0}: Error finding container 
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.372749 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/965c0641-f6e8-44e3-a8a1-32028665b9e2-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-fqfbf\" (UID: \"965c0641-f6e8-44e3-a8a1-32028665b9e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf"
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.384115 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/965c0641-f6e8-44e3-a8a1-32028665b9e2-cert\") pod \"openstack-baremetal-operator-controller-manager-6d776955-fqfbf\" (UID: \"965c0641-f6e8-44e3-a8a1-32028665b9e2\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf"
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.435283 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf"
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.661591 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht"]
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.677944 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9"]
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.700646 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq"]
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.703286 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn"]
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.735200 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl"]
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.750562 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz"]
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.762232 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2"]
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.770598 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2"]
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.778560 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm"]
Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.782437 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-f66b554c6-mxl29"]
Sep 29 17:06:20 crc kubenswrapper[4592]: E0929 17:06:20.808530 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nplpz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-bc7dc7bd9-pfzkm_openstack-operators(9974276a-24ba-4ca1-9c70-f85e17e9c10c): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
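[annotation] The "Unhandled Error" container dump above and the four like it that follow all fail the same way: ErrImagePull: pull QPS exceeded. With this many operator pods created in the same second, image pulls exhaust the kubelet's registry rate limit (the registryPullQPS/registryBurst settings, whose documented defaults are 5 QPS with a burst of 10); pulls over budget fail fast and the sync loop retries them with backoff. A minimal sketch of that token-bucket gate using the client-go flowcontrol helper; the 5/10 values are the defaults, not read from this node's config:

    // Admit at most `burst` pulls immediately, then refill at `qps` per second.
    package main

    import (
        "fmt"

        "k8s.io/client-go/util/flowcontrol"
    )

    func main() {
        limiter := flowcontrol.NewTokenBucketRateLimiter(5.0, 10) // qps=5, burst=10

        for i := 1; i <= 15; i++ {
            if limiter.TryAccept() {
                fmt.Printf("pull %2d: admitted\n", i)
            } else {
                // the kubelet surfaces this as ErrImagePull: pull QPS exceeded
                fmt.Printf("pull %2d: rejected, QPS exceeded\n", i)
            }
        }
    }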
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nplpz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-bc7dc7bd9-pfzkm_openstack-operators(9974276a-24ba-4ca1-9c70-f85e17e9c10c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 17:06:20 crc kubenswrapper[4592]: E0929 17:06:20.810834 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-88ls2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-76669f99c-mqwzl_openstack-operators(50b8b2f8-551f-4379-84b6-5b217fa8b50c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 17:06:20 crc kubenswrapper[4592]: W0929 17:06:20.817350 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33a1eea4_82d2_438a_a844_6539c3016172.slice/crio-17aec8e9b105e6f5f8d5ee523aa262857063b9ffed6cadbf661cedf699272a1a WatchSource:0}: Error finding container 17aec8e9b105e6f5f8d5ee523aa262857063b9ffed6cadbf661cedf699272a1a: Status 404 returned error can't find the container with id 17aec8e9b105e6f5f8d5ee523aa262857063b9ffed6cadbf661cedf699272a1a Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.821870 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb"] Sep 29 17:06:20 crc kubenswrapper[4592]: E0929 17:06:20.834330 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:de99ad053f95f132f62b38335b2e8bf22fc28acbd441c3814764d63b63ef755f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nv6nn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-7d857cc749-b8fbf_openstack-operators(a4e61264-26ad-4012-be6c-4d6596b4ab27): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 17:06:20 crc kubenswrapper[4592]: E0929 17:06:20.834405 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-86mp2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-79d8469568-2pcjb_openstack-operators(a3543654-318a-48ed-8109-a76d758b231d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 17:06:20 crc kubenswrapper[4592]: E0929 17:06:20.834502 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fbgct,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-f66b554c6-mxl29_openstack-operators(33a1eea4-82d2-438a-a844-6539c3016172): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Sep 29 
17:06:20 crc kubenswrapper[4592]: E0929 17:06:20.835950 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb" podUID="a3543654-318a-48ed-8109-a76d758b231d" Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.837525 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf"] Sep 29 17:06:20 crc kubenswrapper[4592]: I0929 17:06:20.858636 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf"] Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.000309 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8da8ce1f-60e7-4381-975e-daf9c5225b10-cert\") pod \"openstack-operator-controller-manager-7b9c4c58f5-fdd5n\" (UID: \"8da8ce1f-60e7-4381-975e-daf9c5225b10\") " pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.008970 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8da8ce1f-60e7-4381-975e-daf9c5225b10-cert\") pod \"openstack-operator-controller-manager-7b9c4c58f5-fdd5n\" (UID: \"8da8ce1f-60e7-4381-975e-daf9c5225b10\") " pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.136393 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" Sep 29 17:06:21 crc kubenswrapper[4592]: E0929 17:06:21.158645 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm" podUID="9974276a-24ba-4ca1-9c70-f85e17e9c10c" Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.275182 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck" event={"ID":"a4b81165-b69a-40fa-b875-6d138351d6e6","Type":"ContainerStarted","Data":"c140e174b5520f50e588d85066a61b592751a5a44d9832dc4814969645218619"} Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.280063 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm" event={"ID":"9974276a-24ba-4ca1-9c70-f85e17e9c10c","Type":"ContainerStarted","Data":"25f5943d04ea7b3cd1f340ca6b3f9e98763d864c43494f33a3e5cf8d7d9ff3f1"} Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.280130 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm" event={"ID":"9974276a-24ba-4ca1-9c70-f85e17e9c10c","Type":"ContainerStarted","Data":"2792715482ef0ec886bd284cd5d532821de1e207599608ff7f7f60ce1c8acb89"} Sep 29 17:06:21 crc kubenswrapper[4592]: E0929 17:06:21.303070 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" 
pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm" podUID="9974276a-24ba-4ca1-9c70-f85e17e9c10c" Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.305630 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz" event={"ID":"fd9f041b-9fd6-4d50-bc82-35fd86eea539","Type":"ContainerStarted","Data":"c3f5ac66fb792f5d81459c96b1234f964a571d242607baea6512f9840686a5e0"} Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.317488 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2" event={"ID":"5746404b-3a0f-4851-9de9-28e4e7ef8f1f","Type":"ContainerStarted","Data":"082c9ba4e91eb6852964e261998811d52936ca821fffca0af2dbd16cb3cc343a"} Sep 29 17:06:21 crc kubenswrapper[4592]: E0929 17:06:21.331605 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" podUID="50b8b2f8-551f-4379-84b6-5b217fa8b50c" Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.341853 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9" event={"ID":"1100f7ed-81d3-49d8-9852-867de93e273b","Type":"ContainerStarted","Data":"ba3551cf75417fda755ff02dca7941718df59050e95c05da4dad5bb97878a345"} Sep 29 17:06:21 crc kubenswrapper[4592]: E0929 17:06:21.345971 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" podUID="33a1eea4-82d2-438a-a844-6539c3016172" Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.372163 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" event={"ID":"50b8b2f8-551f-4379-84b6-5b217fa8b50c","Type":"ContainerStarted","Data":"45a28cb69056411478da2e9f6efe719be144d58164115c04e299258ac940e82c"} Sep 29 17:06:21 crc kubenswrapper[4592]: E0929 17:06:21.385244 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" podUID="50b8b2f8-551f-4379-84b6-5b217fa8b50c" Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.392530 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2" event={"ID":"bb38bf88-f05d-4e0e-8923-66b2097e247c","Type":"ContainerStarted","Data":"b331a67d223a61a10d27b171fff2ec927c45aa1d80465d0dcb282f0af14aaecb"} Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.414548 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht" event={"ID":"5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4","Type":"ContainerStarted","Data":"d9ba56e7b7068100958677683d2af731938c47483911dc546858b71ef757fe64"} Sep 29 17:06:21 crc kubenswrapper[4592]: E0929 17:06:21.414741 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS 
exceeded\"" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" podUID="a4e61264-26ad-4012-be6c-4d6596b4ab27" Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.427676 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx" event={"ID":"6bf183ea-90d6-4aff-9e61-d4cc3692fe08","Type":"ContainerStarted","Data":"747c4fa5d3faf4c07ea51285a96d6aa7c4dfda279d5f49a9a8f3f9af7437f59a"} Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.433412 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" event={"ID":"a4e61264-26ad-4012-be6c-4d6596b4ab27","Type":"ContainerStarted","Data":"150880d26f6b25881187e5b7946e00850b5d08759484fef8d4318ead9c6f3876"} Sep 29 17:06:21 crc kubenswrapper[4592]: E0929 17:06:21.435234 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:de99ad053f95f132f62b38335b2e8bf22fc28acbd441c3814764d63b63ef755f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" podUID="a4e61264-26ad-4012-be6c-4d6596b4ab27" Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.441509 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn" event={"ID":"ba256bd8-c14c-458e-b919-2feedb3a0c46","Type":"ContainerStarted","Data":"404d1838f29f854396ed5833e72b2b233d5fe699d45f13a0eade6c1c9ce0deed"} Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.443955 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb" event={"ID":"a3543654-318a-48ed-8109-a76d758b231d","Type":"ContainerStarted","Data":"a35120671bec21876dac1d508f4894a013ea7ef2b3b961b4db14a087a42e36f9"} Sep 29 17:06:21 crc kubenswrapper[4592]: E0929 17:06:21.445986 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb" podUID="a3543654-318a-48ed-8109-a76d758b231d" Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.448496 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv" event={"ID":"bfa2f914-2596-49e6-bb75-760663a69813","Type":"ContainerStarted","Data":"bed370067659122ea4dfed272b6ae369283cce7f6c2f62972dea7d71707af094"} Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.455202 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf" event={"ID":"965c0641-f6e8-44e3-a8a1-32028665b9e2","Type":"ContainerStarted","Data":"91ea10eff3af5f2fcf6271ff4b476efca6394418081bcf41a9ee401d46bfa88c"} Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.460258 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" event={"ID":"33a1eea4-82d2-438a-a844-6539c3016172","Type":"ContainerStarted","Data":"17aec8e9b105e6f5f8d5ee523aa262857063b9ffed6cadbf661cedf699272a1a"} Sep 29 17:06:21 crc 
kubenswrapper[4592]: E0929 17:06:21.462992 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" podUID="33a1eea4-82d2-438a-a844-6539c3016172" Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.527346 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq" event={"ID":"87bb1f2b-bc93-4b10-aa27-b8efd9ba669a","Type":"ContainerStarted","Data":"217e2bb27d8abb9084cef48af6cd2cbb995d540501a147af2692b245709dd3b9"} Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.530210 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2" event={"ID":"dff5de8b-2910-4e5a-a80a-089c649039cd","Type":"ContainerStarted","Data":"0b413784daa86f38f3a90ea0f8f7306fb71a251981ad54d0f652c479a6f2e1b6"} Sep 29 17:06:21 crc kubenswrapper[4592]: I0929 17:06:21.804279 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n"] Sep 29 17:06:22 crc kubenswrapper[4592]: I0929 17:06:22.555009 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" event={"ID":"33a1eea4-82d2-438a-a844-6539c3016172","Type":"ContainerStarted","Data":"d1cbb7086cfe222a20e9858d7641e0326d76300bbf11750a7b8c89c415f9a13d"} Sep 29 17:06:22 crc kubenswrapper[4592]: E0929 17:06:22.556782 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" podUID="33a1eea4-82d2-438a-a844-6539c3016172" Sep 29 17:06:22 crc kubenswrapper[4592]: I0929 17:06:22.563695 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" event={"ID":"50b8b2f8-551f-4379-84b6-5b217fa8b50c","Type":"ContainerStarted","Data":"7427f9d1e8b7fed7dd6f00861806df57008e48ab06716a9c2e44a5360ea660b8"} Sep 29 17:06:22 crc kubenswrapper[4592]: E0929 17:06:22.565568 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" podUID="50b8b2f8-551f-4379-84b6-5b217fa8b50c" Sep 29 17:06:22 crc kubenswrapper[4592]: I0929 17:06:22.628347 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" event={"ID":"a4e61264-26ad-4012-be6c-4d6596b4ab27","Type":"ContainerStarted","Data":"4669178057c131d36fd293fc275300de4d7d50474202c17924834c8c1f85b8aa"} Sep 29 17:06:22 crc kubenswrapper[4592]: I0929 17:06:22.645535 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" 
event={"ID":"8da8ce1f-60e7-4381-975e-daf9c5225b10","Type":"ContainerStarted","Data":"69518aa580ff5b58c62e9d9e73dc935d393a93930d18371b646715a119daab5a"} Sep 29 17:06:22 crc kubenswrapper[4592]: I0929 17:06:22.645583 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" event={"ID":"8da8ce1f-60e7-4381-975e-daf9c5225b10","Type":"ContainerStarted","Data":"91a57f95b1498702a4f3e9ff257ab7a0f4e286a8608871e9f990dea3b232db69"} Sep 29 17:06:22 crc kubenswrapper[4592]: E0929 17:06:22.647068 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:225524223bf2a7f3a4ce95958fc9ca6fdab02745fb70374e8ff5bf1ddaceda4b\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb" podUID="a3543654-318a-48ed-8109-a76d758b231d" Sep 29 17:06:22 crc kubenswrapper[4592]: E0929 17:06:22.647507 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:de99ad053f95f132f62b38335b2e8bf22fc28acbd441c3814764d63b63ef755f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" podUID="a4e61264-26ad-4012-be6c-4d6596b4ab27" Sep 29 17:06:22 crc kubenswrapper[4592]: E0929 17:06:22.647562 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3c6f7d737e0196ec302f44354228d783ad3b210a75703dda3b39c15c01a67e8c\\\"\"" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm" podUID="9974276a-24ba-4ca1-9c70-f85e17e9c10c" Sep 29 17:06:23 crc kubenswrapper[4592]: I0929 17:06:23.685784 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" event={"ID":"8da8ce1f-60e7-4381-975e-daf9c5225b10","Type":"ContainerStarted","Data":"5579933cff8e6e3c7c3a84b69101464c33b87b4c09c60c265329557753a7772f"} Sep 29 17:06:23 crc kubenswrapper[4592]: I0929 17:06:23.686403 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" Sep 29 17:06:23 crc kubenswrapper[4592]: E0929 17:06:23.687569 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:a303e460aec09217f90043b8ff19c01061af003b614833b33a593df9c00ddf80\\\"\"" pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" podUID="33a1eea4-82d2-438a-a844-6539c3016172" Sep 29 17:06:23 crc kubenswrapper[4592]: E0929 17:06:23.687729 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7169dfadf5f5589f14ca52700d2eba991c2a0c7733f6a1ea795752d993d7f61b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" podUID="50b8b2f8-551f-4379-84b6-5b217fa8b50c" Sep 29 17:06:23 crc kubenswrapper[4592]: E0929 17:06:23.688639 4592 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:de99ad053f95f132f62b38335b2e8bf22fc28acbd441c3814764d63b63ef755f\\\"\"" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" podUID="a4e61264-26ad-4012-be6c-4d6596b4ab27" Sep 29 17:06:23 crc kubenswrapper[4592]: I0929 17:06:23.730594 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" podStartSLOduration=5.730573175 podStartE2EDuration="5.730573175s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:06:23.72855989 +0000 UTC m=+913.876337571" watchObservedRunningTime="2025-09-29 17:06:23.730573175 +0000 UTC m=+913.878350856" Sep 29 17:06:31 crc kubenswrapper[4592]: I0929 17:06:31.142861 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7b9c4c58f5-fdd5n" Sep 29 17:06:33 crc kubenswrapper[4592]: E0929 17:06:33.501918 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a" Sep 29 17:06:33 crc kubenswrapper[4592]: E0929 17:06:33.502415 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:23fcec0642cbd40af10bca0c5d4e538662d21eda98d6dfec37c38b4d7a47191a,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-srwp7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-5bd55b4bff-9n2d2_openstack-operators(dff5de8b-2910-4e5a-a80a-089c649039cd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:06:34 crc kubenswrapper[4592]: E0929 17:06:34.028078 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:e6f1ed6b386f77415c2a44e770d98ab6d16b6f6b494c4d1b4ac4b46368c4a4e6" Sep 29 17:06:34 crc kubenswrapper[4592]: E0929 17:06:34.028368 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:e6f1ed6b386f77415c2a44e770d98ab6d16b6f6b494c4d1b4ac4b46368c4a4e6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-n52dj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-7975b88857-kjsck_openstack-operators(a4b81165-b69a-40fa-b875-6d138351d6e6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:06:34 crc kubenswrapper[4592]: E0929 17:06:34.519496 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302" Sep 29 17:06:34 crc kubenswrapper[4592]: E0929 17:06:34.520477 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:1051afc168038fb814f75e7a5f07c588b295a83ebd143dcd8b46d799e31ad302,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l29ww,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-9976ff44c-l97v2_openstack-operators(bb38bf88-f05d-4e0e-8923-66b2097e247c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:06:34 crc kubenswrapper[4592]: E0929 17:06:34.983045 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884" Sep 29 17:06:34 crc kubenswrapper[4592]: E0929 17:06:34.983315 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:4cdb30423c14ab48888aeeb699259bd9051284ec9f874ed9bab94c7965f45884,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b426w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-6d68dbc695-cffzv_openstack-operators(bfa2f914-2596-49e6-bb75-760663a69813): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:06:35 crc kubenswrapper[4592]: E0929 17:06:35.474080 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:21792a2317c0a55e40b2a02a7d5d4682b76538ed2a2e0633199aa395e60ecc72" Sep 29 17:06:35 crc kubenswrapper[4592]: E0929 17:06:35.474295 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:21792a2317c0a55e40b2a02a7d5d4682b76538ed2a2e0633199aa395e60ecc72,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wd6wl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-84958c4d49-k969c_openstack-operators(f8504fb5-9c3b-4b51-bf22-31c6bcdacad4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:06:35 crc kubenswrapper[4592]: E0929 17:06:35.882273 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:485df5c7813cdf4cf21f48ec48c8e3e4962fee6a1ae4c64f7af127d5ab346a10" Sep 29 17:06:35 crc kubenswrapper[4592]: E0929 17:06:35.882512 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:485df5c7813cdf4cf21f48ec48c8e3e4962fee6a1ae4c64f7af127d5ab346a10,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6kfrg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-64d7b59854-7sgxz_openstack-operators(fd9f041b-9fd6-4d50-bc82-35fd86eea539): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:06:37 crc kubenswrapper[4592]: E0929 17:06:37.325406 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:bb39758cc8cd0d2cd02841dc81b53fd88647e2db15ee16cdd8c44d4098a942fd" Sep 29 17:06:37 crc kubenswrapper[4592]: E0929 17:06:37.325906 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:bb39758cc8cd0d2cd02841dc81b53fd88647e2db15ee16cdd8c44d4098a942fd,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dkxpl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-6ff8b75857-55xvk_openstack-operators(7ef58432-073e-43a5-bc36-38cb3611b118): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:06:42 crc kubenswrapper[4592]: E0929 17:06:42.676465 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2" Sep 29 17:06:42 crc kubenswrapper[4592]: E0929 17:06:42.677316 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6kx6d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-589c58c6c-j5st9_openstack-operators(1100f7ed-81d3-49d8-9852-867de93e273b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:06:43 crc kubenswrapper[4592]: E0929 17:06:43.770519 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:1e2c65f4331a2bb568d97fbcd02e3bca2627e133a794e1e4fd13368e86ce6bd1" Sep 29 17:06:43 crc kubenswrapper[4592]: E0929 17:06:43.770739 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:1e2c65f4331a2bb568d97fbcd02e3bca2627e133a794e1e4fd13368e86ce6bd1,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-slpmc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-644bddb6d8-5p559_openstack-operators(de451eb0-13ae-4fab-a6f3-3cc8fb77566f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 29 17:06:46 crc kubenswrapper[4592]: E0929 17:06:46.948183 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck" podUID="a4b81165-b69a-40fa-b875-6d138351d6e6"
Sep 29 17:06:47 crc kubenswrapper[4592]: E0929 17:06:47.511001 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2" podUID="dff5de8b-2910-4e5a-a80a-089c649039cd"
Sep 29 17:06:47 crc kubenswrapper[4592]: I0929 17:06:47.884078 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9" event={"ID":"1100f7ed-81d3-49d8-9852-867de93e273b","Type":"ContainerStarted","Data":"61ef27ccb8a50b2ffebab9191a884b75d48d6ce81cbdc3de72d03b4934e9872e"}
Sep 29 17:06:47 crc kubenswrapper[4592]: I0929 17:06:47.885475 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2" event={"ID":"dff5de8b-2910-4e5a-a80a-089c649039cd","Type":"ContainerStarted","Data":"5c38ba52898df3f03d8067c68acfea2388df637f5a62437fe41015b9f07c62d9"}
Sep 29 17:06:47 crc kubenswrapper[4592]: I0929 17:06:47.887757 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66" event={"ID":"1d74dab2-fe04-4218-8b91-4b958b0ad39d","Type":"ContainerStarted","Data":"db9a67040d814ffd9e6582e5b1fef9afaf60dc9ecc2dd8baad84af50bb067a7e"}
Sep 29 17:06:47 crc kubenswrapper[4592]: I0929 17:06:47.890651 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck" event={"ID":"a4b81165-b69a-40fa-b875-6d138351d6e6","Type":"ContainerStarted","Data":"148c85b75d15cd79c71f7163ce41593984f387bacf34e46c2eaeff35777c1c83"}
Sep 29 17:06:47 crc kubenswrapper[4592]: E0929 17:06:47.902103 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k969c" podUID="f8504fb5-9c3b-4b51-bf22-31c6bcdacad4"
Sep 29 17:06:47 crc kubenswrapper[4592]: E0929 17:06:47.902618 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2" podUID="bb38bf88-f05d-4e0e-8923-66b2097e247c"
Sep 29 17:06:47 crc kubenswrapper[4592]: E0929 17:06:47.903363 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9" podUID="1100f7ed-81d3-49d8-9852-867de93e273b"
Sep 29 17:06:47 crc kubenswrapper[4592]: I0929 17:06:47.916197 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" event={"ID":"50b8b2f8-551f-4379-84b6-5b217fa8b50c","Type":"ContainerStarted","Data":"d9add22298dd5fadc78c9cfce3b8146a7f7729689e9c6927849922450b348842"}
Sep 29 17:06:47 crc kubenswrapper[4592]: E0929 17:06:47.916313 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz" podUID="fd9f041b-9fd6-4d50-bc82-35fd86eea539"
Sep 29 17:06:47 crc kubenswrapper[4592]: I0929 17:06:47.945747 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq" event={"ID":"87bb1f2b-bc93-4b10-aa27-b8efd9ba669a","Type":"ContainerStarted","Data":"344ea524e1951e050b3d196f3213faed3fc9f8f1a99fef7fcd672977e42a6f3d"}
Sep 29 17:06:47 crc kubenswrapper[4592]: E0929 17:06:47.958504 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559" podUID="de451eb0-13ae-4fab-a6f3-3cc8fb77566f"
Sep 29 17:06:47 crc kubenswrapper[4592]: I0929 17:06:47.972925 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56" event={"ID":"9c565c72-206a-42a7-943d-c55fd9065e5f","Type":"ContainerStarted","Data":"a72522fc75087a4f5c1b1fa6403f5e3da2caaa365752d1a4289b428a2bba2eab"}
Sep 29 17:06:47 crc kubenswrapper[4592]: I0929 17:06:47.979413 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2" event={"ID":"bb38bf88-f05d-4e0e-8923-66b2097e247c","Type":"ContainerStarted","Data":"b3701e8fa9297ded5272b47b289c56332df509733f0250de56d49bbb07d25c85"}
Sep 29 17:06:47 crc kubenswrapper[4592]: I0929 17:06:47.987005 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz" event={"ID":"fd9f041b-9fd6-4d50-bc82-35fd86eea539","Type":"ContainerStarted","Data":"b4377a8413aa47bf6d2c5862be39488227cbdb6bfbd7d01660f25aaa3389b842"}
Sep 29 17:06:48 crc kubenswrapper[4592]: I0929 17:06:48.001970 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k969c" event={"ID":"f8504fb5-9c3b-4b51-bf22-31c6bcdacad4","Type":"ContainerStarted","Data":"dc4b6391a14f244f7cb0737e832799dcd816868625bbf1b688f040d098488ac5"}
Sep 29 17:06:48 crc kubenswrapper[4592]: I0929 17:06:48.011189 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx" event={"ID":"6bf183ea-90d6-4aff-9e61-d4cc3692fe08","Type":"ContainerStarted","Data":"eeb2f1afe6083f62712245d195ab683ad24c177080dded6e34cc294ea1ccab12"}
Sep 29 17:06:48 crc kubenswrapper[4592]: E0929 17:06:48.227721 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk" podUID="7ef58432-073e-43a5-bc36-38cb3611b118"
Sep 29 17:06:48 crc kubenswrapper[4592]: E0929 17:06:48.250920 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv" podUID="bfa2f914-2596-49e6-bb75-760663a69813"
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.018418 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm" event={"ID":"9974276a-24ba-4ca1-9c70-f85e17e9c10c","Type":"ContainerStarted","Data":"4bbca1f64a284b49526f14f047d6f70ef78285482ce09e5e3efa307186f458c8"}
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.018845 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm"
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.020529 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" event={"ID":"33a1eea4-82d2-438a-a844-6539c3016172","Type":"ContainerStarted","Data":"2cd9f362f32e32aaa018d52e67ad236c3a5ab3d3b0ae52b3b0c92582d76a2fa0"}
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.021446 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29"
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.022014 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn" event={"ID":"ba256bd8-c14c-458e-b919-2feedb3a0c46","Type":"ContainerStarted","Data":"a432145a3657400c24014ed190da091010093057151cb0ec308f36789641dd86"}
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.023535 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv" event={"ID":"bfa2f914-2596-49e6-bb75-760663a69813","Type":"ContainerStarted","Data":"e7f63f856275c6c65fc075289375af7c7e0c4d53b21fbb7b33134b8a437d1ceb"}
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.026059 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk" event={"ID":"7ef58432-073e-43a5-bc36-38cb3611b118","Type":"ContainerStarted","Data":"df141c592c208a31d4e0c5b5acd4212a8aae2b359a6bdc7695f356d39ef491a7"}
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.033333 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf" event={"ID":"965c0641-f6e8-44e3-a8a1-32028665b9e2","Type":"ContainerStarted","Data":"f6ab2504a85d510771571773dcdeff293e12a74ca34be5d152836bc06ab664a7"}
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.040457 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht" event={"ID":"5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4","Type":"ContainerStarted","Data":"3939b680c2459c5a433a6d7c51ac1674ba31ecf91cf3ae6754fca3bf7488fda3"}
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.055787 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2" event={"ID":"5746404b-3a0f-4851-9de9-28e4e7ef8f1f","Type":"ContainerStarted","Data":"22ebae4a72b5b95cb79b53b71f68724637fc7d404762197d2190f6633978699b"}
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.075056 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm" podStartSLOduration=4.910675152 podStartE2EDuration="31.075038573s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.808417512 +0000 UTC m=+910.956195193" lastFinishedPulling="2025-09-29 17:06:46.972780943 +0000 UTC m=+937.120558614" observedRunningTime="2025-09-29 17:06:49.071236667 +0000 UTC m=+939.219014348" watchObservedRunningTime="2025-09-29 17:06:49.075038573 +0000 UTC m=+939.222816254"
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.099455 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" event={"ID":"a4e61264-26ad-4012-be6c-4d6596b4ab27","Type":"ContainerStarted","Data":"6d2c0fb12bd6fa32840ccb2b5f7949b5b9b13ca94b9428ea62d787c6c7bdb62e"}
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.100201 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf"
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.134740 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559" event={"ID":"de451eb0-13ae-4fab-a6f3-3cc8fb77566f","Type":"ContainerStarted","Data":"43985410fe858cc1b8f40fdc8acf44ecf1b735ad4807e645aefa600b97896c52"}
Sep 29 17:06:49 crc kubenswrapper[4592]: E0929 17:06:49.136751 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:1e2c65f4331a2bb568d97fbcd02e3bca2627e133a794e1e4fd13368e86ce6bd1\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559" podUID="de451eb0-13ae-4fab-a6f3-3cc8fb77566f"
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.146702 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb" event={"ID":"a3543654-318a-48ed-8109-a76d758b231d","Type":"ContainerStarted","Data":"6d0a93a46e002ff262f38312909244f56db0bd5f01bac260a726dcbbaa2c5437"}
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.146837 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl"
Sep 29 17:06:49 crc kubenswrapper[4592]: E0929 17:06:49.148447 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:a6b3408d79df6b6d4a467e49defaa4a9d9c088c94d0605a4fee0030c9ccc84d2\\\"\"" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9" podUID="1100f7ed-81d3-49d8-9852-867de93e273b"
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.192831 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29" podStartSLOduration=5.058632991 podStartE2EDuration="31.19281571s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.834336017 +0000 UTC m=+910.982113698" lastFinishedPulling="2025-09-29 17:06:46.968518736 +0000 UTC m=+937.116296417" observedRunningTime="2025-09-29 17:06:49.190402663 +0000 UTC m=+939.338180344" watchObservedRunningTime="2025-09-29 17:06:49.19281571 +0000 UTC m=+939.340593391"
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.240644 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf" podStartSLOduration=6.111908308 podStartE2EDuration="32.240629518s" podCreationTimestamp="2025-09-29 17:06:17 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.83412643 +0000 UTC m=+910.981904121" lastFinishedPulling="2025-09-29 17:06:46.96284765 +0000 UTC m=+937.110625331" observedRunningTime="2025-09-29 17:06:49.219924556 +0000 UTC m=+939.367702237" watchObservedRunningTime="2025-09-29 17:06:49.240629518 +0000 UTC m=+939.388407189"
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.300892 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-79d8469568-2pcjb" podStartSLOduration=4.15294289 podStartE2EDuration="30.300877598s" podCreationTimestamp="2025-09-29 17:06:19 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.834272085 +0000 UTC m=+910.982049766" lastFinishedPulling="2025-09-29 17:06:46.982206793 +0000 UTC m=+937.129984474" observedRunningTime="2025-09-29 17:06:49.273996048 +0000 UTC m=+939.421773729" watchObservedRunningTime="2025-09-29 17:06:49.300877598 +0000 UTC m=+939.448655279"
Sep 29 17:06:49 crc kubenswrapper[4592]: I0929 17:06:49.334268 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl" podStartSLOduration=5.241491821 podStartE2EDuration="31.334249649s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.808591996 +0000 UTC m=+910.956369677" lastFinishedPulling="2025-09-29 17:06:46.901349824 +0000 UTC m=+937.049127505" observedRunningTime="2025-09-29 17:06:49.333007115 +0000 UTC m=+939.480784796" watchObservedRunningTime="2025-09-29 17:06:49.334249649 +0000 UTC m=+939.482027330"
Sep 29 17:06:50 crc kubenswrapper[4592]: E0929 17:06:50.154769 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:1e2c65f4331a2bb568d97fbcd02e3bca2627e133a794e1e4fd13368e86ce6bd1\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559" podUID="de451eb0-13ae-4fab-a6f3-3cc8fb77566f"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.168452 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56" event={"ID":"9c565c72-206a-42a7-943d-c55fd9065e5f","Type":"ContainerStarted","Data":"28196090d614a30a2083007b0a79aaffa5c447b70343e1cf0c3acfa38ca49934"}
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.168621 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.172817 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn" event={"ID":"ba256bd8-c14c-458e-b919-2feedb3a0c46","Type":"ContainerStarted","Data":"f0cd4311d82ac0f80818b5e907dd09c0b5b009bfde1b430cc128dcc8b735b074"}
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.173192 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.175737 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq" event={"ID":"87bb1f2b-bc93-4b10-aa27-b8efd9ba669a","Type":"ContainerStarted","Data":"c1a803f2de3c2d2006034e56d9d43995cb29605bba4324d15324df92bbc6c35c"}
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.176246 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.176974 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.178065 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66" event={"ID":"1d74dab2-fe04-4218-8b91-4b958b0ad39d","Type":"ContainerStarted","Data":"bf935cce0dd3f056c9b50a502b57e4d3b20a98c52c681fe733fb52f7ab2fc6c1"}
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.178356 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.178626 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.180050 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf" event={"ID":"965c0641-f6e8-44e3-a8a1-32028665b9e2","Type":"ContainerStarted","Data":"6f5b4bf33b21592c42f8906c8ed600df0f5f49357d37593c3a4bf75e9af432eb"}
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.180188 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.191664 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht" event={"ID":"5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4","Type":"ContainerStarted","Data":"cfb45fb8187a6180f56e4904b238d6331bd5da149d36f86e8e69fda04406768a"}
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.191763 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.192743 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.192828 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.195028 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2" event={"ID":"5746404b-3a0f-4851-9de9-28e4e7ef8f1f","Type":"ContainerStarted","Data":"57aa2092daeac5f258699356202b6ab3c2045df14b6b9fdddfd9bfdb6753737d"}
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.198219 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.199953 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.199981 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5d889d78cf-f8v56" podStartSLOduration=18.87153162 podStartE2EDuration="35.199965026s" podCreationTimestamp="2025-09-29 17:06:17 +0000 UTC" firstStartedPulling="2025-09-29 17:06:19.506288432 +0000 UTC m=+909.654066113" lastFinishedPulling="2025-09-29 17:06:35.834721838 +0000 UTC m=+925.982499519" observedRunningTime="2025-09-29 17:06:52.185778625 +0000 UTC m=+942.333556326" watchObservedRunningTime="2025-09-29 17:06:52.199965026 +0000 UTC m=+942.347742707"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.201465 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx" event={"ID":"6bf183ea-90d6-4aff-9e61-d4cc3692fe08","Type":"ContainerStarted","Data":"4a38198625e57066b634f74ae4d423fc887b3cf00e4f70432dad3d490fb06ecc"}
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.202525 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.202691 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.206414 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.207129 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.218655 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6d776955-fqfbf" podStartSLOduration=9.323625356 podStartE2EDuration="34.218633711s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.967277372 +0000 UTC m=+911.115055053" lastFinishedPulling="2025-09-29 17:06:45.862285727 +0000 UTC m=+936.010063408" observedRunningTime="2025-09-29 17:06:52.215380492 +0000 UTC m=+942.363158183" watchObservedRunningTime="2025-09-29 17:06:52.218633711 +0000 UTC m=+942.366411512"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.275745 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-84f4f7b77b-bj7fn" podStartSLOduration=10.153780584 podStartE2EDuration="35.275726375s" podCreationTimestamp="2025-09-29 17:06:17 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.739372589 +0000 UTC m=+910.887150270" lastFinishedPulling="2025-09-29 17:06:45.86131838 +0000 UTC m=+936.009096061" observedRunningTime="2025-09-29 17:06:52.242608291 +0000 UTC m=+942.390385982" watchObservedRunningTime="2025-09-29 17:06:52.275726375 +0000 UTC m=+942.423504056"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.293337 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-76fcc6dc7c-fc5kq" podStartSLOduration=9.173066223 podStartE2EDuration="34.293315349s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.742078323 +0000 UTC m=+910.889856004" lastFinishedPulling="2025-09-29 17:06:45.862327449 +0000 UTC m=+936.010105130" observedRunningTime="2025-09-29 17:06:52.276816774 +0000 UTC m=+942.424594455" watchObservedRunningTime="2025-09-29 17:06:52.293315349 +0000 UTC m=+942.441093030"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.362371 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-9f4696d94-p4r66" podStartSLOduration=8.053797016 podStartE2EDuration="35.362343963s" podCreationTimestamp="2025-09-29 17:06:17 +0000 UTC" firstStartedPulling="2025-09-29 17:06:19.59289222 +0000 UTC m=+909.740669901" lastFinishedPulling="2025-09-29 17:06:46.901439167 +0000 UTC m=+937.049216848" observedRunningTime="2025-09-29 17:06:52.32741257 +0000 UTC m=+942.475190251" watchObservedRunningTime="2025-09-29 17:06:52.362343963 +0000 UTC m=+942.510121654"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.389029 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-b8d54b5d7-77pht" podStartSLOduration=9.22989979 podStartE2EDuration="34.388995057s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.704346902 +0000 UTC m=+910.852124583" lastFinishedPulling="2025-09-29 17:06:45.863442169 +0000 UTC m=+936.011219850" observedRunningTime="2025-09-29 17:06:52.383924138 +0000 UTC m=+942.531701819" watchObservedRunningTime="2025-09-29 17:06:52.388995057 +0000 UTC m=+942.536772738"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.405317 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-88c7-ssmqx" podStartSLOduration=8.794858387 podStartE2EDuration="34.405299127s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.25231154 +0000 UTC m=+910.400089221" lastFinishedPulling="2025-09-29 17:06:45.86275228 +0000 UTC m=+936.010529961" observedRunningTime="2025-09-29 17:06:52.401125872 +0000 UTC m=+942.548903553" watchObservedRunningTime="2025-09-29 17:06:52.405299127 +0000 UTC m=+942.553076808"
Sep 29 17:06:52 crc kubenswrapper[4592]: I0929 17:06:52.491758 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-c7c776c96-hg6b2" podStartSLOduration=9.421877814 podStartE2EDuration="34.4917375s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.793318586 +0000 UTC m=+910.941096267" lastFinishedPulling="2025-09-29 17:06:45.863178272 +0000 UTC m=+936.010955953" observedRunningTime="2025-09-29 17:06:52.489846608 +0000 UTC m=+942.637624289" watchObservedRunningTime="2025-09-29 17:06:52.4917375 +0000 UTC m=+942.639515181"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.215200 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz" event={"ID":"fd9f041b-9fd6-4d50-bc82-35fd86eea539","Type":"ContainerStarted","Data":"e21a9e60d1b09de566c5334380d016b298bbb0d1f01d21cd3faac208b95d023a"}
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.215299 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.217196 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k969c" event={"ID":"f8504fb5-9c3b-4b51-bf22-31c6bcdacad4","Type":"ContainerStarted","Data":"0f64118febcfd4f5fd8b225306a5b83edf2422635bd2846c92eecdc58794a329"}
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.217272 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k969c"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.219106 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2" event={"ID":"dff5de8b-2910-4e5a-a80a-089c649039cd","Type":"ContainerStarted","Data":"374ed372c5734c57520c8905155abbf547d45e4c6bd9c05272147365545a777f"}
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.219247 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.220749 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2" event={"ID":"bb38bf88-f05d-4e0e-8923-66b2097e247c","Type":"ContainerStarted","Data":"82201ef9b6df44033d38d699a4baf5ffe117546a4e69eac83a88a953794fbcb2"}
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.221203 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.223272 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv" event={"ID":"bfa2f914-2596-49e6-bb75-760663a69813","Type":"ContainerStarted","Data":"ffad2b7d641bac615122285200ad015371f345758e8195841e76970a93b2bf2a"}
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.223443 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.234096 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck" event={"ID":"a4b81165-b69a-40fa-b875-6d138351d6e6","Type":"ContainerStarted","Data":"ad03803ea557b5a6fba3810a8692ce245da7ed16dc2fed2d76d9ece7fe631fa5"}
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.234897 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.239754 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk" event={"ID":"7ef58432-073e-43a5-bc36-38cb3611b118","Type":"ContainerStarted","Data":"cea48b5bce66587b72fba31d7865c85c70381a378635bf04fb1a487451254937"}
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.242298 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.287644 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz" podStartSLOduration=3.869132465 podStartE2EDuration="36.287617862s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.808199026 +0000 UTC m=+910.955976707" lastFinishedPulling="2025-09-29 17:06:53.226684423 +0000 UTC m=+943.374462104" observedRunningTime="2025-09-29 17:06:54.251341122 +0000 UTC m=+944.399118803" watchObservedRunningTime="2025-09-29 17:06:54.287617862 +0000 UTC m=+944.435395543"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.323660 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck" podStartSLOduration=4.352852571 podStartE2EDuration="37.323646145s" podCreationTimestamp="2025-09-29 17:06:17 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.258195902 +0000 UTC m=+910.405973583" lastFinishedPulling="2025-09-29 17:06:53.228989466 +0000 UTC m=+943.376767157" observedRunningTime="2025-09-29 17:06:54.29149872 +0000 UTC m=+944.439276401" watchObservedRunningTime="2025-09-29 17:06:54.323646145 +0000 UTC m=+944.471423826"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.324708 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2" podStartSLOduration=5.3911501170000005 podStartE2EDuration="36.324704144s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.785111479 +0000 UTC m=+910.932889170" lastFinishedPulling="2025-09-29 17:06:51.718665516 +0000 UTC m=+941.866443197" observedRunningTime="2025-09-29 17:06:54.322282608 +0000 UTC m=+944.470060289" watchObservedRunningTime="2025-09-29 17:06:54.324704144 +0000 UTC m=+944.472481825"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.347611 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv" podStartSLOduration=3.415451638 podStartE2EDuration="36.347592336s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.285576257 +0000 UTC m=+910.433353938" lastFinishedPulling="2025-09-29 17:06:53.217716955 +0000 UTC m=+943.365494636" observedRunningTime="2025-09-29 17:06:54.345662803 +0000 UTC m=+944.493440484" watchObservedRunningTime="2025-09-29 17:06:54.347592336 +0000 UTC m=+944.495370017"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.381180 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2" podStartSLOduration=3.426956626 podStartE2EDuration="36.381158792s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.273826623 +0000 UTC m=+910.421604304" lastFinishedPulling="2025-09-29 17:06:53.228028799 +0000 UTC m=+943.375806470" observedRunningTime="2025-09-29 17:06:54.374497687 +0000 UTC m=+944.522275388" watchObservedRunningTime="2025-09-29 17:06:54.381158792 +0000 UTC m=+944.528936483"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.397094 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k969c" podStartSLOduration=4.162653777 podStartE2EDuration="37.39707452s" podCreationTimestamp="2025-09-29 17:06:17 +0000 UTC" firstStartedPulling="2025-09-29 17:06:19.990799449 +0000 UTC m=+910.138577130" lastFinishedPulling="2025-09-29 17:06:53.225220192 +0000 UTC m=+943.372997873" observedRunningTime="2025-09-29 17:06:54.39488154 +0000 UTC m=+944.542659221" watchObservedRunningTime="2025-09-29 17:06:54.39707452 +0000 UTC m=+944.544852201"
Sep 29 17:06:54 crc kubenswrapper[4592]: I0929 17:06:54.414477 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk" podStartSLOduration=4.082087997 podStartE2EDuration="37.41446007s" podCreationTimestamp="2025-09-29 17:06:17 +0000 UTC" firstStartedPulling="2025-09-29 17:06:19.895085081 +0000 UTC m=+910.042862762" lastFinishedPulling="2025-09-29 17:06:53.227457154 +0000 UTC m=+943.375234835" observedRunningTime="2025-09-29 17:06:54.409210905 +0000 UTC m=+944.556988586" watchObservedRunningTime="2025-09-29 17:06:54.41446007 +0000 UTC m=+944.562237751"
Sep 29 17:06:58 crc kubenswrapper[4592]: I0929 17:06:58.179235 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6ff8b75857-55xvk"
Sep 29 17:06:58 crc kubenswrapper[4592]: I0929 17:06:58.225539 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-84958c4d49-k969c"
Sep 29 17:06:58 crc kubenswrapper[4592]: I0929 17:06:58.491011 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-7975b88857-kjsck"
Sep 29 17:06:58 crc kubenswrapper[4592]: I0929 17:06:58.612513 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6d68dbc695-cffzv"
Sep 29 17:06:58 crc kubenswrapper[4592]: I0929 17:06:58.618483 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5bd55b4bff-9n2d2"
Sep 29 17:06:58 crc kubenswrapper[4592]: I0929 17:06:58.968654 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-9976ff44c-l97v2"
Sep 29 17:06:59 crc kubenswrapper[4592]: I0929 17:06:59.049337 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-64d7b59854-7sgxz"
Sep 29 17:06:59 crc kubenswrapper[4592]: I0929 17:06:59.064325 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-bc7dc7bd9-pfzkm"
Sep 29 17:06:59 crc kubenswrapper[4592]: I0929 17:06:59.332663 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-f66b554c6-mxl29"
Sep 29 17:06:59 crc kubenswrapper[4592]: I0929 17:06:59.366989 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-76669f99c-mqwzl"
Sep 29 17:07:00 crc kubenswrapper[4592]: I0929 17:07:00.142328 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-7d857cc749-b8fbf"
Sep 29 17:07:01 crc kubenswrapper[4592]: I0929 17:07:01.291558 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9" event={"ID":"1100f7ed-81d3-49d8-9852-867de93e273b","Type":"ContainerStarted","Data":"e88e5a78d4a25e9c067cc066c4667000915cb6d70512c5e7944330c9eabffc17"}
Sep 29 17:07:01 crc kubenswrapper[4592]: I0929 17:07:01.292124 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9"
Sep 29 17:07:01 crc kubenswrapper[4592]: I0929 17:07:01.309052 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9" podStartSLOduration=3.011390298 podStartE2EDuration="43.309032949s" podCreationTimestamp="2025-09-29 17:06:18 +0000 UTC" firstStartedPulling="2025-09-29 17:06:20.725838515 +0000 UTC m=+910.873616196" lastFinishedPulling="2025-09-29 17:07:01.023481156 +0000 UTC m=+951.171258847" observedRunningTime="2025-09-29 17:07:01.30581936 +0000 UTC m=+951.453597041" watchObservedRunningTime="2025-09-29 17:07:01.309032949 +0000 UTC m=+951.456810650"
Sep 29 17:07:06 crc kubenswrapper[4592]: I0929 17:07:06.322627 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559" event={"ID":"de451eb0-13ae-4fab-a6f3-3cc8fb77566f","Type":"ContainerStarted","Data":"c9bdad32f1b79aeb7ae523ee90fdc9c14e852dc48e67c2d2009c76849a131c7b"}
Sep 29 17:07:06 crc kubenswrapper[4592]: I0929 17:07:06.323639 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559"
Sep 29 17:07:06 crc kubenswrapper[4592]: I0929 17:07:06.358557 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559" podStartSLOduration=3.305178158 podStartE2EDuration="49.358536274s" podCreationTimestamp="2025-09-29 17:06:17 +0000 UTC" firstStartedPulling="2025-09-29 17:06:19.745330363 +0000 UTC m=+909.893108044" lastFinishedPulling="2025-09-29 17:07:05.798688479 +0000 UTC m=+955.946466160" observedRunningTime="2025-09-29 17:07:06.350249476 +0000 UTC m=+956.498027157" watchObservedRunningTime="2025-09-29 17:07:06.358536274 +0000 UTC m=+956.506313955"
Sep 29 17:07:09 crc kubenswrapper[4592]: I0929 17:07:09.055980 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-589c58c6c-j5st9"
Sep 29 17:07:18 crc kubenswrapper[4592]: I0929 17:07:18.166222 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-644bddb6d8-5p559"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.609979 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ssw6z"]
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.614350 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.620513 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.621237 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ssw6z"]
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.626407 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-j6bk7"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.626727 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.629674 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.678521 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8k9rn"]
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.679689 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.688412 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.693332 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8k9rn"]
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.799578 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-config\") pod \"dnsmasq-dns-78dd6ddcc-8k9rn\" (UID: \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.799725 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frsnf\" (UniqueName: \"kubernetes.io/projected/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-kube-api-access-frsnf\") pod \"dnsmasq-dns-78dd6ddcc-8k9rn\" (UID: \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.799776 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtrck\" (UniqueName: \"kubernetes.io/projected/86e11690-8edd-4be1-a94c-1bffc3890248-kube-api-access-wtrck\") pod \"dnsmasq-dns-675f4bcbfc-ssw6z\" (UID: \"86e11690-8edd-4be1-a94c-1bffc3890248\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.799890 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-8k9rn\" (UID: \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.799928 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86e11690-8edd-4be1-a94c-1bffc3890248-config\") pod \"dnsmasq-dns-675f4bcbfc-ssw6z\" (UID: \"86e11690-8edd-4be1-a94c-1bffc3890248\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.900620 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-8k9rn\" (UID: \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.900926 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86e11690-8edd-4be1-a94c-1bffc3890248-config\") pod \"dnsmasq-dns-675f4bcbfc-ssw6z\" (UID: \"86e11690-8edd-4be1-a94c-1bffc3890248\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.901026 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-config\") pod \"dnsmasq-dns-78dd6ddcc-8k9rn\" (UID: \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.901128 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frsnf\" (UniqueName: \"kubernetes.io/projected/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-kube-api-access-frsnf\") pod \"dnsmasq-dns-78dd6ddcc-8k9rn\" (UID: \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.901250 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtrck\" (UniqueName: \"kubernetes.io/projected/86e11690-8edd-4be1-a94c-1bffc3890248-kube-api-access-wtrck\") pod \"dnsmasq-dns-675f4bcbfc-ssw6z\" (UID: \"86e11690-8edd-4be1-a94c-1bffc3890248\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.901844 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86e11690-8edd-4be1-a94c-1bffc3890248-config\") pod \"dnsmasq-dns-675f4bcbfc-ssw6z\" (UID: \"86e11690-8edd-4be1-a94c-1bffc3890248\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.901984 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-config\") pod \"dnsmasq-dns-78dd6ddcc-8k9rn\" (UID: \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.902275 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-8k9rn\" (UID: \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.923989 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frsnf\" (UniqueName: \"kubernetes.io/projected/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-kube-api-access-frsnf\") pod \"dnsmasq-dns-78dd6ddcc-8k9rn\" (UID: \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\") " pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.930333 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtrck\" (UniqueName: \"kubernetes.io/projected/86e11690-8edd-4be1-a94c-1bffc3890248-kube-api-access-wtrck\") pod \"dnsmasq-dns-675f4bcbfc-ssw6z\" (UID: \"86e11690-8edd-4be1-a94c-1bffc3890248\") " pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.949185 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z"
Sep 29 17:07:33 crc kubenswrapper[4592]: I0929 17:07:33.998691 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn"
Sep 29 17:07:34 crc kubenswrapper[4592]: I0929 17:07:34.371649 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ssw6z"]
Sep 29 17:07:34 crc kubenswrapper[4592]: I0929 17:07:34.380809 4592 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 17:07:34 crc kubenswrapper[4592]: I0929 17:07:34.481554 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8k9rn"]
Sep 29 17:07:34 crc kubenswrapper[4592]: W0929 17:07:34.487268 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0c9e5c6_c84b_446f_bd87_3c9b9ad2f36b.slice/crio-4373da6acc0800afbc74bbaadf31f3841284539aa13377ab91c44add6e236c58 WatchSource:0}: Error finding container 4373da6acc0800afbc74bbaadf31f3841284539aa13377ab91c44add6e236c58: Status 404 returned error can't find the container with id 4373da6acc0800afbc74bbaadf31f3841284539aa13377ab91c44add6e236c58
Sep 29 17:07:34 crc kubenswrapper[4592]: I0929 17:07:34.511710 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z" event={"ID":"86e11690-8edd-4be1-a94c-1bffc3890248","Type":"ContainerStarted","Data":"90dd9bdcdca95fe4b7e8691cb36b7b4038ce41eacc1139ab8e79b685b2077b8e"}
Sep 29 17:07:34 crc kubenswrapper[4592]: I0929 17:07:34.514094 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn" event={"ID":"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b","Type":"ContainerStarted","Data":"4373da6acc0800afbc74bbaadf31f3841284539aa13377ab91c44add6e236c58"}
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.595634 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ssw6z"]
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.625914 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qq7dq"]
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.627092 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq"
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.651409 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qq7dq"]
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.670041 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/436f5852-e2a7-4374-ad72-e9f4c63a046b-config\") pod \"dnsmasq-dns-666b6646f7-qq7dq\" (UID: \"436f5852-e2a7-4374-ad72-e9f4c63a046b\") " pod="openstack/dnsmasq-dns-666b6646f7-qq7dq"
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.670080 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qth8w\" (UniqueName: \"kubernetes.io/projected/436f5852-e2a7-4374-ad72-e9f4c63a046b-kube-api-access-qth8w\") pod \"dnsmasq-dns-666b6646f7-qq7dq\" (UID: \"436f5852-e2a7-4374-ad72-e9f4c63a046b\") " pod="openstack/dnsmasq-dns-666b6646f7-qq7dq"
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.670108 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/436f5852-e2a7-4374-ad72-e9f4c63a046b-dns-svc\") pod \"dnsmasq-dns-666b6646f7-qq7dq\" (UID: \"436f5852-e2a7-4374-ad72-e9f4c63a046b\") " pod="openstack/dnsmasq-dns-666b6646f7-qq7dq"
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.771507 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/436f5852-e2a7-4374-ad72-e9f4c63a046b-config\") pod \"dnsmasq-dns-666b6646f7-qq7dq\" (UID: \"436f5852-e2a7-4374-ad72-e9f4c63a046b\") " pod="openstack/dnsmasq-dns-666b6646f7-qq7dq"
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.771834 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qth8w\" (UniqueName: \"kubernetes.io/projected/436f5852-e2a7-4374-ad72-e9f4c63a046b-kube-api-access-qth8w\") pod \"dnsmasq-dns-666b6646f7-qq7dq\" (UID: \"436f5852-e2a7-4374-ad72-e9f4c63a046b\") " pod="openstack/dnsmasq-dns-666b6646f7-qq7dq"
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.771870 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/436f5852-e2a7-4374-ad72-e9f4c63a046b-dns-svc\") pod \"dnsmasq-dns-666b6646f7-qq7dq\" (UID: \"436f5852-e2a7-4374-ad72-e9f4c63a046b\") " pod="openstack/dnsmasq-dns-666b6646f7-qq7dq"
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.772814 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/436f5852-e2a7-4374-ad72-e9f4c63a046b-dns-svc\") pod \"dnsmasq-dns-666b6646f7-qq7dq\" (UID: \"436f5852-e2a7-4374-ad72-e9f4c63a046b\") " pod="openstack/dnsmasq-dns-666b6646f7-qq7dq"
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.773379 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/436f5852-e2a7-4374-ad72-e9f4c63a046b-config\") pod \"dnsmasq-dns-666b6646f7-qq7dq\" (UID: \"436f5852-e2a7-4374-ad72-e9f4c63a046b\") " pod="openstack/dnsmasq-dns-666b6646f7-qq7dq"
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.807122 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qth8w\" (UniqueName: \"kubernetes.io/projected/436f5852-e2a7-4374-ad72-e9f4c63a046b-kube-api-access-qth8w\") pod \"dnsmasq-dns-666b6646f7-qq7dq\" (UID: \"436f5852-e2a7-4374-ad72-e9f4c63a046b\") " pod="openstack/dnsmasq-dns-666b6646f7-qq7dq"
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.961238 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8k9rn"]
Sep 29 17:07:36 crc kubenswrapper[4592]: I0929 17:07:36.962737 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.006527 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-cxttg"]
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.007756 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-cxttg"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.069049 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-cxttg"]
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.075733 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cdeea8f-847e-49a2-8f8f-2d04429192c2-config\") pod \"dnsmasq-dns-57d769cc4f-cxttg\" (UID: \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-cxttg"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.075800 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cdeea8f-847e-49a2-8f8f-2d04429192c2-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-cxttg\" (UID: \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-cxttg"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.075845 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgn6p\" (UniqueName: \"kubernetes.io/projected/5cdeea8f-847e-49a2-8f8f-2d04429192c2-kube-api-access-cgn6p\") pod \"dnsmasq-dns-57d769cc4f-cxttg\" (UID: \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-cxttg"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.177238 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cdeea8f-847e-49a2-8f8f-2d04429192c2-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-cxttg\" (UID: \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-cxttg"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.177316 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgn6p\" (UniqueName: \"kubernetes.io/projected/5cdeea8f-847e-49a2-8f8f-2d04429192c2-kube-api-access-cgn6p\") pod \"dnsmasq-dns-57d769cc4f-cxttg\" (UID: \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-cxttg"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.177387 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cdeea8f-847e-49a2-8f8f-2d04429192c2-config\") pod \"dnsmasq-dns-57d769cc4f-cxttg\" (UID: \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-cxttg"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.178526 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cdeea8f-847e-49a2-8f8f-2d04429192c2-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-cxttg\" (UID: \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-cxttg"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.178563 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cdeea8f-847e-49a2-8f8f-2d04429192c2-config\") pod \"dnsmasq-dns-57d769cc4f-cxttg\" (UID: \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-cxttg"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.219961 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgn6p\" (UniqueName: \"kubernetes.io/projected/5cdeea8f-847e-49a2-8f8f-2d04429192c2-kube-api-access-cgn6p\") pod \"dnsmasq-dns-57d769cc4f-cxttg\" (UID: \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-cxttg"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.360983 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-cxttg"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.651988 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qq7dq"]
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.825517 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.829764 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.834694 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.835023 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.835171 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-6kbmf"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.841404 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.841459 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.841583 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.841735 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.857356 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.894980 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.895044 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.895067 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-config-data\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.895099 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/62319168-243a-4613-a565-d864d75110e2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.895126 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/62319168-243a-4613-a565-d864d75110e2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.895173 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/62319168-243a-4613-a565-d864d75110e2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.895223 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.895246 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.895285 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.895309 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/62319168-243a-4613-a565-d864d75110e2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.895328 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxv2k\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-kube-api-access-gxv2k\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.967792 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-cxttg"]
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.995977 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.996016 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.996033 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/62319168-243a-4613-a565-d864d75110e2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.996046 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxv2k\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-kube-api-access-gxv2k\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.996092 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.996117 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.996134 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-config-data\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.996169 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/62319168-243a-4613-a565-d864d75110e2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.996191 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/62319168-243a-4613-a565-d864d75110e2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.996206 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/62319168-243a-4613-a565-d864d75110e2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.996237 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.999124 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0"
Sep 29 17:07:37 crc kubenswrapper[4592]: I0929 17:07:37.999774 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/62319168-243a-4613-a565-d864d75110e2-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.000428 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.000492 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-config-data\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.000699 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/62319168-243a-4613-a565-d864d75110e2-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.003388 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-server-conf\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.006094 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/62319168-243a-4613-a565-d864d75110e2-pod-info\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.006796 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName:
\"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.011842 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/62319168-243a-4613-a565-d864d75110e2-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.017543 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxv2k\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-kube-api-access-gxv2k\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.030662 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.038328 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") " pod="openstack/rabbitmq-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.185388 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.198005 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.222035 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.222185 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.225402 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.225555 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.225951 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.226366 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-xgpjr" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.228350 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.228478 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.231234 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.302588 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.302638 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.302670 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.302691 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.302710 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.302735 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.302774 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.302819 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.302985 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.303007 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcdhq\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-kube-api-access-vcdhq\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.303040 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.403803 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.404034 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.404050 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.404071 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcdhq\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-kube-api-access-vcdhq\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.404103 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.404128 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.404169 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.404199 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.404221 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.404257 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.404295 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.404712 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.404902 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.405048 
4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.405982 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.406311 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.407837 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.413304 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.415797 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.418841 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.425451 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcdhq\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-kube-api-access-vcdhq\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.428406 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.432198 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.553097 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-cxttg" event={"ID":"5cdeea8f-847e-49a2-8f8f-2d04429192c2","Type":"ContainerStarted","Data":"955bdd8a785f51a7c47eef4d45a5f82f5ebcae02147038cbae7a91c798485c29"} Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.558591 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq" event={"ID":"436f5852-e2a7-4374-ad72-e9f4c63a046b","Type":"ContainerStarted","Data":"7448b54708981054d894e8bbe7e384e269edcc8cd03e6ee096e58a98449e21c0"} Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.564214 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:07:38 crc kubenswrapper[4592]: I0929 17:07:38.722624 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 17:07:38 crc kubenswrapper[4592]: W0929 17:07:38.759724 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62319168_243a_4613_a565_d864d75110e2.slice/crio-121eb8a448941d99d5c57493edc537928a98b24afc70c69eaa73901686adbdc7 WatchSource:0}: Error finding container 121eb8a448941d99d5c57493edc537928a98b24afc70c69eaa73901686adbdc7: Status 404 returned error can't find the container with id 121eb8a448941d99d5c57493edc537928a98b24afc70c69eaa73901686adbdc7 Sep 29 17:07:39 crc kubenswrapper[4592]: I0929 17:07:39.170911 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 17:07:39 crc kubenswrapper[4592]: W0929 17:07:39.230140 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9840d1a_98b1_4ff7_9140_d21bacc11b0a.slice/crio-0c0ddd4903d76dca2c5363c527ffaae2c35560bb14bff084400b869bb36c1b69 WatchSource:0}: Error finding container 0c0ddd4903d76dca2c5363c527ffaae2c35560bb14bff084400b869bb36c1b69: Status 404 returned error can't find the container with id 0c0ddd4903d76dca2c5363c527ffaae2c35560bb14bff084400b869bb36c1b69 Sep 29 17:07:39 crc kubenswrapper[4592]: I0929 17:07:39.582754 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a9840d1a-98b1-4ff7-9140-d21bacc11b0a","Type":"ContainerStarted","Data":"0c0ddd4903d76dca2c5363c527ffaae2c35560bb14bff084400b869bb36c1b69"} Sep 29 17:07:39 crc kubenswrapper[4592]: I0929 17:07:39.585272 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"62319168-243a-4613-a565-d864d75110e2","Type":"ContainerStarted","Data":"121eb8a448941d99d5c57493edc537928a98b24afc70c69eaa73901686adbdc7"} Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.795467 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.796852 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.803214 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.803307 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.803493 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.803643 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.805564 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-c8cb9" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.818404 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.824427 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.863602 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.866400 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.868301 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.868522 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.868691 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.868825 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-5gzrl" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.881365 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.958360 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4c85a81f-2e67-4a6f-928b-d4735005cd43-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.958405 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.958459 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4c85a81f-2e67-4a6f-928b-d4735005cd43-config-data-default\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " 
pod="openstack/openstack-galera-0" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.958501 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4c85a81f-2e67-4a6f-928b-d4735005cd43-secrets\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.958526 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nrpq\" (UniqueName: \"kubernetes.io/projected/4c85a81f-2e67-4a6f-928b-d4735005cd43-kube-api-access-9nrpq\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.958550 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c85a81f-2e67-4a6f-928b-d4735005cd43-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.958571 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c85a81f-2e67-4a6f-928b-d4735005cd43-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.958588 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c85a81f-2e67-4a6f-928b-d4735005cd43-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:40 crc kubenswrapper[4592]: I0929 17:07:40.958609 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4c85a81f-2e67-4a6f-928b-d4735005cd43-kolla-config\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.059733 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/59ac4af6-5ade-49f1-8098-52e823dcf61f-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.059811 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2krrp\" (UniqueName: \"kubernetes.io/projected/59ac4af6-5ade-49f1-8098-52e823dcf61f-kube-api-access-2krrp\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.059849 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4c85a81f-2e67-4a6f-928b-d4735005cd43-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 
17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.059874 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.059910 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/59ac4af6-5ade-49f1-8098-52e823dcf61f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.059939 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4c85a81f-2e67-4a6f-928b-d4735005cd43-config-data-default\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060009 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060032 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/59ac4af6-5ade-49f1-8098-52e823dcf61f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060057 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/59ac4af6-5ade-49f1-8098-52e823dcf61f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060078 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59ac4af6-5ade-49f1-8098-52e823dcf61f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060106 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4c85a81f-2e67-4a6f-928b-d4735005cd43-secrets\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060200 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nrpq\" (UniqueName: \"kubernetes.io/projected/4c85a81f-2e67-4a6f-928b-d4735005cd43-kube-api-access-9nrpq\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060241 4592 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c85a81f-2e67-4a6f-928b-d4735005cd43-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060268 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c85a81f-2e67-4a6f-928b-d4735005cd43-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060295 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c85a81f-2e67-4a6f-928b-d4735005cd43-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060324 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4c85a81f-2e67-4a6f-928b-d4735005cd43-kolla-config\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060357 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/59ac4af6-5ade-49f1-8098-52e823dcf61f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060383 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59ac4af6-5ade-49f1-8098-52e823dcf61f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.060965 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4c85a81f-2e67-4a6f-928b-d4735005cd43-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.061305 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.064403 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c85a81f-2e67-4a6f-928b-d4735005cd43-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.064475 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/4c85a81f-2e67-4a6f-928b-d4735005cd43-config-data-default\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.065009 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4c85a81f-2e67-4a6f-928b-d4735005cd43-kolla-config\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.068084 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c85a81f-2e67-4a6f-928b-d4735005cd43-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.068381 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/4c85a81f-2e67-4a6f-928b-d4735005cd43-secrets\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.089981 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c85a81f-2e67-4a6f-928b-d4735005cd43-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.094673 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nrpq\" (UniqueName: \"kubernetes.io/projected/4c85a81f-2e67-4a6f-928b-d4735005cd43-kube-api-access-9nrpq\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.101110 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-0\" (UID: \"4c85a81f-2e67-4a6f-928b-d4735005cd43\") " pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.135161 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.165391 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/59ac4af6-5ade-49f1-8098-52e823dcf61f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.165447 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59ac4af6-5ade-49f1-8098-52e823dcf61f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.165488 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/59ac4af6-5ade-49f1-8098-52e823dcf61f-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.165519 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2krrp\" (UniqueName: \"kubernetes.io/projected/59ac4af6-5ade-49f1-8098-52e823dcf61f-kube-api-access-2krrp\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.165557 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/59ac4af6-5ade-49f1-8098-52e823dcf61f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.165621 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.165647 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/59ac4af6-5ade-49f1-8098-52e823dcf61f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.165678 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/59ac4af6-5ade-49f1-8098-52e823dcf61f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.165699 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59ac4af6-5ade-49f1-8098-52e823dcf61f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.166555 
4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.166866 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/59ac4af6-5ade-49f1-8098-52e823dcf61f-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.167910 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59ac4af6-5ade-49f1-8098-52e823dcf61f-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.172541 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/59ac4af6-5ade-49f1-8098-52e823dcf61f-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.173825 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59ac4af6-5ade-49f1-8098-52e823dcf61f-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.173960 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/59ac4af6-5ade-49f1-8098-52e823dcf61f-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.175886 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/59ac4af6-5ade-49f1-8098-52e823dcf61f-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.178074 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/59ac4af6-5ade-49f1-8098-52e823dcf61f-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.194243 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2krrp\" (UniqueName: \"kubernetes.io/projected/59ac4af6-5ade-49f1-8098-52e823dcf61f-kube-api-access-2krrp\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.202246 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"59ac4af6-5ade-49f1-8098-52e823dcf61f\") " pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.366057 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.370304 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.373461 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-b6jht" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.373784 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.376290 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.398721 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.470189 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1ed52369-92ab-4da4-a517-1555c79b0a38-kolla-config\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.470265 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ed52369-92ab-4da4-a517-1555c79b0a38-memcached-tls-certs\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.470308 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqghq\" (UniqueName: \"kubernetes.io/projected/1ed52369-92ab-4da4-a517-1555c79b0a38-kube-api-access-hqghq\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.470354 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1ed52369-92ab-4da4-a517-1555c79b0a38-config-data\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.470386 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ed52369-92ab-4da4-a517-1555c79b0a38-combined-ca-bundle\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.498517 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.571906 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ed52369-92ab-4da4-a517-1555c79b0a38-memcached-tls-certs\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.571959 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqghq\" (UniqueName: \"kubernetes.io/projected/1ed52369-92ab-4da4-a517-1555c79b0a38-kube-api-access-hqghq\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.571996 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1ed52369-92ab-4da4-a517-1555c79b0a38-config-data\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.572020 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ed52369-92ab-4da4-a517-1555c79b0a38-combined-ca-bundle\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.572086 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1ed52369-92ab-4da4-a517-1555c79b0a38-kolla-config\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.572842 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1ed52369-92ab-4da4-a517-1555c79b0a38-kolla-config\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.572941 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1ed52369-92ab-4da4-a517-1555c79b0a38-config-data\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.576007 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ed52369-92ab-4da4-a517-1555c79b0a38-memcached-tls-certs\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.583509 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ed52369-92ab-4da4-a517-1555c79b0a38-combined-ca-bundle\") pod \"memcached-0\" (UID: \"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.622871 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqghq\" (UniqueName: \"kubernetes.io/projected/1ed52369-92ab-4da4-a517-1555c79b0a38-kube-api-access-hqghq\") pod \"memcached-0\" (UID: 
\"1ed52369-92ab-4da4-a517-1555c79b0a38\") " pod="openstack/memcached-0" Sep 29 17:07:41 crc kubenswrapper[4592]: I0929 17:07:41.699505 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Sep 29 17:07:43 crc kubenswrapper[4592]: I0929 17:07:43.625332 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 17:07:43 crc kubenswrapper[4592]: I0929 17:07:43.626655 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 17:07:43 crc kubenswrapper[4592]: I0929 17:07:43.631927 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-2q4hl" Sep 29 17:07:43 crc kubenswrapper[4592]: I0929 17:07:43.644454 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 17:07:43 crc kubenswrapper[4592]: I0929 17:07:43.704017 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rrps\" (UniqueName: \"kubernetes.io/projected/27cb9cb7-75ba-479b-ad1e-2e47beaefae4-kube-api-access-5rrps\") pod \"kube-state-metrics-0\" (UID: \"27cb9cb7-75ba-479b-ad1e-2e47beaefae4\") " pod="openstack/kube-state-metrics-0" Sep 29 17:07:43 crc kubenswrapper[4592]: I0929 17:07:43.806964 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rrps\" (UniqueName: \"kubernetes.io/projected/27cb9cb7-75ba-479b-ad1e-2e47beaefae4-kube-api-access-5rrps\") pod \"kube-state-metrics-0\" (UID: \"27cb9cb7-75ba-479b-ad1e-2e47beaefae4\") " pod="openstack/kube-state-metrics-0" Sep 29 17:07:43 crc kubenswrapper[4592]: I0929 17:07:43.842828 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rrps\" (UniqueName: \"kubernetes.io/projected/27cb9cb7-75ba-479b-ad1e-2e47beaefae4-kube-api-access-5rrps\") pod \"kube-state-metrics-0\" (UID: \"27cb9cb7-75ba-479b-ad1e-2e47beaefae4\") " pod="openstack/kube-state-metrics-0" Sep 29 17:07:43 crc kubenswrapper[4592]: I0929 17:07:43.958688 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.557268 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jfzwf"] Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.559016 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.563673 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.563748 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-bxxf5" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.563997 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.579087 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jfzwf"] Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.584994 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-7x4wp"] Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.591973 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.614584 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-7x4wp"] Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.692525 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-ovn-controller-tls-certs\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.693044 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-combined-ca-bundle\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.693185 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj5fq\" (UniqueName: \"kubernetes.io/projected/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-kube-api-access-rj5fq\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.693254 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-var-run-ovn\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.693349 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-scripts\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.693453 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-var-run\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.693559 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-var-log-ovn\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.795474 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-var-log-ovn\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.795522 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/fd373ead-845f-4c4d-b9d7-38f8424697d5-scripts\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.795557 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/fd373ead-845f-4c4d-b9d7-38f8424697d5-var-lib\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799094 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-ovn-controller-tls-certs\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799160 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-combined-ca-bundle\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799216 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj5fq\" (UniqueName: \"kubernetes.io/projected/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-kube-api-access-rj5fq\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799252 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/fd373ead-845f-4c4d-b9d7-38f8424697d5-etc-ovs\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799270 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fd373ead-845f-4c4d-b9d7-38f8424697d5-var-run\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799290 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-var-run-ovn\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799346 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-scripts\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799363 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4bv9\" (UniqueName: \"kubernetes.io/projected/fd373ead-845f-4c4d-b9d7-38f8424697d5-kube-api-access-c4bv9\") pod 
\"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799423 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/fd373ead-845f-4c4d-b9d7-38f8424697d5-var-log\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799440 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-var-run\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799506 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-var-log-ovn\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799634 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-var-run\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.799842 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-var-run-ovn\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.806212 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-ovn-controller-tls-certs\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.815103 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj5fq\" (UniqueName: \"kubernetes.io/projected/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-kube-api-access-rj5fq\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.835350 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-scripts\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.837213 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d813cc31-c8ba-48c0-b523-3d2b3fbc3341-combined-ca-bundle\") pod \"ovn-controller-jfzwf\" (UID: \"d813cc31-c8ba-48c0-b523-3d2b3fbc3341\") " pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.876927 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jfzwf" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.901177 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/fd373ead-845f-4c4d-b9d7-38f8424697d5-etc-ovs\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.901228 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fd373ead-845f-4c4d-b9d7-38f8424697d5-var-run\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.901281 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4bv9\" (UniqueName: \"kubernetes.io/projected/fd373ead-845f-4c4d-b9d7-38f8424697d5-kube-api-access-c4bv9\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.901332 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fd373ead-845f-4c4d-b9d7-38f8424697d5-var-run\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.901491 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/fd373ead-845f-4c4d-b9d7-38f8424697d5-etc-ovs\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.901906 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/fd373ead-845f-4c4d-b9d7-38f8424697d5-var-log\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.901980 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd373ead-845f-4c4d-b9d7-38f8424697d5-scripts\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.902022 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/fd373ead-845f-4c4d-b9d7-38f8424697d5-var-lib\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.902418 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/fd373ead-845f-4c4d-b9d7-38f8424697d5-var-log\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.906317 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: 
\"kubernetes.io/host-path/fd373ead-845f-4c4d-b9d7-38f8424697d5-var-lib\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.909328 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fd373ead-845f-4c4d-b9d7-38f8424697d5-scripts\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:47 crc kubenswrapper[4592]: I0929 17:07:47.922907 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4bv9\" (UniqueName: \"kubernetes.io/projected/fd373ead-845f-4c4d-b9d7-38f8424697d5-kube-api-access-c4bv9\") pod \"ovn-controller-ovs-7x4wp\" (UID: \"fd373ead-845f-4c4d-b9d7-38f8424697d5\") " pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.211625 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.769638 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.770823 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.778619 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-hbkc7" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.779442 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.780483 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.780674 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.780861 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.800937 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.923064 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b9e2d243-07ca-4b99-a929-9ae3321c3274-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.923120 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9e2d243-07ca-4b99-a929-9ae3321c3274-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.923184 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9e2d243-07ca-4b99-a929-9ae3321c3274-config\") pod \"ovsdbserver-nb-0\" (UID: 
\"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.923239 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9e2d243-07ca-4b99-a929-9ae3321c3274-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.923270 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9e2d243-07ca-4b99-a929-9ae3321c3274-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.923285 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e2d243-07ca-4b99-a929-9ae3321c3274-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.923328 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:48 crc kubenswrapper[4592]: I0929 17:07:48.923348 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrdwm\" (UniqueName: \"kubernetes.io/projected/b9e2d243-07ca-4b99-a929-9ae3321c3274-kube-api-access-wrdwm\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.028115 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9e2d243-07ca-4b99-a929-9ae3321c3274-config\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.028282 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9e2d243-07ca-4b99-a929-9ae3321c3274-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.028353 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e2d243-07ca-4b99-a929-9ae3321c3274-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.028377 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9e2d243-07ca-4b99-a929-9ae3321c3274-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.028446 4592 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.028494 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrdwm\" (UniqueName: \"kubernetes.io/projected/b9e2d243-07ca-4b99-a929-9ae3321c3274-kube-api-access-wrdwm\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.028541 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b9e2d243-07ca-4b99-a929-9ae3321c3274-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.028591 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9e2d243-07ca-4b99-a929-9ae3321c3274-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.029480 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.029840 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9e2d243-07ca-4b99-a929-9ae3321c3274-config\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.029864 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b9e2d243-07ca-4b99-a929-9ae3321c3274-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.030342 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9e2d243-07ca-4b99-a929-9ae3321c3274-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.037121 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9e2d243-07ca-4b99-a929-9ae3321c3274-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.037185 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9e2d243-07ca-4b99-a929-9ae3321c3274-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc 
kubenswrapper[4592]: I0929 17:07:49.042236 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e2d243-07ca-4b99-a929-9ae3321c3274-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.044543 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrdwm\" (UniqueName: \"kubernetes.io/projected/b9e2d243-07ca-4b99-a929-9ae3321c3274-kube-api-access-wrdwm\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.079745 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b9e2d243-07ca-4b99-a929-9ae3321c3274\") " pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:49 crc kubenswrapper[4592]: I0929 17:07:49.093912 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Sep 29 17:07:50 crc kubenswrapper[4592]: I0929 17:07:50.892605 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 17:07:50 crc kubenswrapper[4592]: I0929 17:07:50.896313 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:50 crc kubenswrapper[4592]: I0929 17:07:50.899454 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Sep 29 17:07:50 crc kubenswrapper[4592]: I0929 17:07:50.899454 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Sep 29 17:07:50 crc kubenswrapper[4592]: I0929 17:07:50.899579 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-ddtsd" Sep 29 17:07:50 crc kubenswrapper[4592]: I0929 17:07:50.899664 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Sep 29 17:07:50 crc kubenswrapper[4592]: I0929 17:07:50.922643 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.065054 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1549c716-ca22-42ff-9cea-e63e50856936-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.065135 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1549c716-ca22-42ff-9cea-e63e50856936-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.065177 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ptd9\" (UniqueName: \"kubernetes.io/projected/1549c716-ca22-42ff-9cea-e63e50856936-kube-api-access-4ptd9\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " 
pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.065267 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1549c716-ca22-42ff-9cea-e63e50856936-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.065333 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1549c716-ca22-42ff-9cea-e63e50856936-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.065449 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1549c716-ca22-42ff-9cea-e63e50856936-config\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.065547 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.065615 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1549c716-ca22-42ff-9cea-e63e50856936-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.166928 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1549c716-ca22-42ff-9cea-e63e50856936-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.166977 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ptd9\" (UniqueName: \"kubernetes.io/projected/1549c716-ca22-42ff-9cea-e63e50856936-kube-api-access-4ptd9\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.167006 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1549c716-ca22-42ff-9cea-e63e50856936-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.167030 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1549c716-ca22-42ff-9cea-e63e50856936-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.167087 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/1549c716-ca22-42ff-9cea-e63e50856936-config\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.167130 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.167195 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1549c716-ca22-42ff-9cea-e63e50856936-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.167267 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1549c716-ca22-42ff-9cea-e63e50856936-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.169257 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.169709 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1549c716-ca22-42ff-9cea-e63e50856936-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.171042 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1549c716-ca22-42ff-9cea-e63e50856936-config\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.172572 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1549c716-ca22-42ff-9cea-e63e50856936-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.173273 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1549c716-ca22-42ff-9cea-e63e50856936-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.173279 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1549c716-ca22-42ff-9cea-e63e50856936-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.189820 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-4ptd9\" (UniqueName: \"kubernetes.io/projected/1549c716-ca22-42ff-9cea-e63e50856936-kube-api-access-4ptd9\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.192261 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1549c716-ca22-42ff-9cea-e63e50856936-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.210063 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1549c716-ca22-42ff-9cea-e63e50856936\") " pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:51 crc kubenswrapper[4592]: I0929 17:07:51.225617 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Sep 29 17:07:56 crc kubenswrapper[4592]: E0929 17:07:56.055269 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 17:07:56 crc kubenswrapper[4592]: E0929 17:07:56.055761 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cgn6p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-cxttg_openstack(5cdeea8f-847e-49a2-8f8f-2d04429192c2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:07:56 crc kubenswrapper[4592]: E0929 17:07:56.057558 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-cxttg" podUID="5cdeea8f-847e-49a2-8f8f-2d04429192c2" Sep 29 17:07:56 crc kubenswrapper[4592]: E0929 17:07:56.067411 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 17:07:56 crc kubenswrapper[4592]: E0929 17:07:56.067612 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qth8w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-qq7dq_openstack(436f5852-e2a7-4374-ad72-e9f4c63a046b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:07:56 crc kubenswrapper[4592]: E0929 17:07:56.068790 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq" podUID="436f5852-e2a7-4374-ad72-e9f4c63a046b" Sep 29 17:07:56 crc kubenswrapper[4592]: E0929 17:07:56.739298 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-cxttg" podUID="5cdeea8f-847e-49a2-8f8f-2d04429192c2" Sep 29 17:07:56 crc kubenswrapper[4592]: E0929 17:07:56.739575 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq" podUID="436f5852-e2a7-4374-ad72-e9f4c63a046b" Sep 29 17:07:57 crc kubenswrapper[4592]: E0929 17:07:57.085985 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 17:07:57 crc kubenswrapper[4592]: E0929 17:07:57.086624 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wtrck,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-ssw6z_openstack(86e11690-8edd-4be1-a94c-1bffc3890248): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:07:57 crc kubenswrapper[4592]: E0929 17:07:57.088487 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z" podUID="86e11690-8edd-4be1-a94c-1bffc3890248" Sep 29 17:07:57 crc kubenswrapper[4592]: E0929 17:07:57.105747 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Sep 29 17:07:57 crc kubenswrapper[4592]: E0929 17:07:57.106059 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-frsnf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-8k9rn_openstack(c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:07:57 crc kubenswrapper[4592]: E0929 17:07:57.108136 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn" podUID="c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b" Sep 29 17:07:57 crc kubenswrapper[4592]: I0929 17:07:57.615432 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Sep 29 17:07:57 crc kubenswrapper[4592]: W0929 17:07:57.629102 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ed52369_92ab_4da4_a517_1555c79b0a38.slice/crio-19a43bd9f872d80f7026df2f02c9ca12a010953f5e35f2055495b7228aa75201 WatchSource:0}: Error finding container 19a43bd9f872d80f7026df2f02c9ca12a010953f5e35f2055495b7228aa75201: Status 404 returned error can't find the container with id 19a43bd9f872d80f7026df2f02c9ca12a010953f5e35f2055495b7228aa75201 Sep 29 17:07:57 crc kubenswrapper[4592]: I0929 17:07:57.676441 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Sep 29 17:07:57 crc kubenswrapper[4592]: W0929 17:07:57.686304 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59ac4af6_5ade_49f1_8098_52e823dcf61f.slice/crio-047f853335e5e519ea00cd7d3d5e7e3db5f6fdf1972be9ab1e46ee36b05f96f7 WatchSource:0}: Error finding container 047f853335e5e519ea00cd7d3d5e7e3db5f6fdf1972be9ab1e46ee36b05f96f7: Status 404 returned error can't find the 
container with id 047f853335e5e519ea00cd7d3d5e7e3db5f6fdf1972be9ab1e46ee36b05f96f7 Sep 29 17:07:57 crc kubenswrapper[4592]: I0929 17:07:57.743928 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Sep 29 17:07:57 crc kubenswrapper[4592]: I0929 17:07:57.746618 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"62319168-243a-4613-a565-d864d75110e2","Type":"ContainerStarted","Data":"6b3c15d30109ab050101c4ecbd3b5c0d3bea838101281d73bcfb26096ff58bad"} Sep 29 17:07:57 crc kubenswrapper[4592]: I0929 17:07:57.748212 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a9840d1a-98b1-4ff7-9140-d21bacc11b0a","Type":"ContainerStarted","Data":"e76507d181ba89b027da1aa7409c60822aa6079a9886d55ec0a23fd0d49cae9f"} Sep 29 17:07:57 crc kubenswrapper[4592]: I0929 17:07:57.750872 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"59ac4af6-5ade-49f1-8098-52e823dcf61f","Type":"ContainerStarted","Data":"047f853335e5e519ea00cd7d3d5e7e3db5f6fdf1972be9ab1e46ee36b05f96f7"} Sep 29 17:07:57 crc kubenswrapper[4592]: I0929 17:07:57.753387 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"1ed52369-92ab-4da4-a517-1555c79b0a38","Type":"ContainerStarted","Data":"19a43bd9f872d80f7026df2f02c9ca12a010953f5e35f2055495b7228aa75201"} Sep 29 17:07:57 crc kubenswrapper[4592]: I0929 17:07:57.774316 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 17:07:57 crc kubenswrapper[4592]: I0929 17:07:57.994385 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jfzwf"] Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.169499 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.241367 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.284881 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Sep 29 17:07:58 crc kubenswrapper[4592]: W0929 17:07:58.294317 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1549c716_ca22_42ff_9cea_e63e50856936.slice/crio-c3325aac867e075e1bf9d5e1b15cef5ce2a7683a3cabdf993ffb88a812e791fc WatchSource:0}: Error finding container c3325aac867e075e1bf9d5e1b15cef5ce2a7683a3cabdf993ffb88a812e791fc: Status 404 returned error can't find the container with id c3325aac867e075e1bf9d5e1b15cef5ce2a7683a3cabdf993ffb88a812e791fc Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.299456 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86e11690-8edd-4be1-a94c-1bffc3890248-config\") pod \"86e11690-8edd-4be1-a94c-1bffc3890248\" (UID: \"86e11690-8edd-4be1-a94c-1bffc3890248\") " Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.299492 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86e11690-8edd-4be1-a94c-1bffc3890248-config" (OuterVolumeSpecName: "config") pod "86e11690-8edd-4be1-a94c-1bffc3890248" (UID: "86e11690-8edd-4be1-a94c-1bffc3890248"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.299579 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frsnf\" (UniqueName: \"kubernetes.io/projected/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-kube-api-access-frsnf\") pod \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\" (UID: \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\") " Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.300217 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-dns-svc\") pod \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\" (UID: \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\") " Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.300276 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtrck\" (UniqueName: \"kubernetes.io/projected/86e11690-8edd-4be1-a94c-1bffc3890248-kube-api-access-wtrck\") pod \"86e11690-8edd-4be1-a94c-1bffc3890248\" (UID: \"86e11690-8edd-4be1-a94c-1bffc3890248\") " Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.300321 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-config\") pod \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\" (UID: \"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b\") " Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.300310 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b" (UID: "c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.300881 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-config" (OuterVolumeSpecName: "config") pod "c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b" (UID: "c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.300939 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.300959 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86e11690-8edd-4be1-a94c-1bffc3890248-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.304839 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-kube-api-access-frsnf" (OuterVolumeSpecName: "kube-api-access-frsnf") pod "c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b" (UID: "c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b"). InnerVolumeSpecName "kube-api-access-frsnf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.304903 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86e11690-8edd-4be1-a94c-1bffc3890248-kube-api-access-wtrck" (OuterVolumeSpecName: "kube-api-access-wtrck") pod "86e11690-8edd-4be1-a94c-1bffc3890248" (UID: "86e11690-8edd-4be1-a94c-1bffc3890248"). InnerVolumeSpecName "kube-api-access-wtrck". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.402262 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtrck\" (UniqueName: \"kubernetes.io/projected/86e11690-8edd-4be1-a94c-1bffc3890248-kube-api-access-wtrck\") on node \"crc\" DevicePath \"\"" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.402301 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.402316 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frsnf\" (UniqueName: \"kubernetes.io/projected/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b-kube-api-access-frsnf\") on node \"crc\" DevicePath \"\"" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.763283 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jfzwf" event={"ID":"d813cc31-c8ba-48c0-b523-3d2b3fbc3341","Type":"ContainerStarted","Data":"f1cc77e3d9d34a6c8eeda6069a52a12db4d3e4f127aff6f1a3eba8b0bf42f95b"} Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.764804 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.764814 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-ssw6z" event={"ID":"86e11690-8edd-4be1-a94c-1bffc3890248","Type":"ContainerDied","Data":"90dd9bdcdca95fe4b7e8691cb36b7b4038ce41eacc1139ab8e79b685b2077b8e"} Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.766184 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn" event={"ID":"c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b","Type":"ContainerDied","Data":"4373da6acc0800afbc74bbaadf31f3841284539aa13377ab91c44add6e236c58"} Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.766220 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-8k9rn" Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.767986 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4c85a81f-2e67-4a6f-928b-d4735005cd43","Type":"ContainerStarted","Data":"1d39e13c4a9acb43fbe2410454a7f9b4133832f10f85448963f1bacd80b13945"} Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.784950 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"27cb9cb7-75ba-479b-ad1e-2e47beaefae4","Type":"ContainerStarted","Data":"20acf68f555b58d3074a3250cafd7ec80f2c7b01e71ae86c7b838d8e154d6e13"} Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.789982 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"1549c716-ca22-42ff-9cea-e63e50856936","Type":"ContainerStarted","Data":"c3325aac867e075e1bf9d5e1b15cef5ce2a7683a3cabdf993ffb88a812e791fc"} Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.850586 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ssw6z"] Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.857755 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-ssw6z"] Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.899510 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8k9rn"] Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.936279 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-8k9rn"] Sep 29 17:07:58 crc kubenswrapper[4592]: I0929 17:07:58.945807 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-7x4wp"] Sep 29 17:07:59 crc kubenswrapper[4592]: I0929 17:07:59.195382 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86e11690-8edd-4be1-a94c-1bffc3890248" path="/var/lib/kubelet/pods/86e11690-8edd-4be1-a94c-1bffc3890248/volumes" Sep 29 17:07:59 crc kubenswrapper[4592]: I0929 17:07:59.195730 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b" path="/var/lib/kubelet/pods/c0c9e5c6-c84b-446f-bd87-3c9b9ad2f36b/volumes" Sep 29 17:07:59 crc kubenswrapper[4592]: I0929 17:07:59.208283 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Sep 29 17:07:59 crc kubenswrapper[4592]: W0929 17:07:59.339508 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd373ead_845f_4c4d_b9d7_38f8424697d5.slice/crio-2a4aabbebba07e4b5b0989f9da6f20b2cd52f823511f691901b1dd6cd18edccb WatchSource:0}: Error finding container 2a4aabbebba07e4b5b0989f9da6f20b2cd52f823511f691901b1dd6cd18edccb: Status 404 returned error can't find the container with id 2a4aabbebba07e4b5b0989f9da6f20b2cd52f823511f691901b1dd6cd18edccb Sep 29 17:07:59 crc kubenswrapper[4592]: I0929 17:07:59.800373 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7x4wp" event={"ID":"fd373ead-845f-4c4d-b9d7-38f8424697d5","Type":"ContainerStarted","Data":"2a4aabbebba07e4b5b0989f9da6f20b2cd52f823511f691901b1dd6cd18edccb"} Sep 29 17:08:00 crc kubenswrapper[4592]: W0929 17:08:00.334121 4592 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9e2d243_07ca_4b99_a929_9ae3321c3274.slice/crio-1752c3f0f3659fccced8f53d9d73f8cefae607b55cbca4d076744659020c6381 WatchSource:0}: Error finding container 1752c3f0f3659fccced8f53d9d73f8cefae607b55cbca4d076744659020c6381: Status 404 returned error can't find the container with id 1752c3f0f3659fccced8f53d9d73f8cefae607b55cbca4d076744659020c6381 Sep 29 17:08:00 crc kubenswrapper[4592]: I0929 17:08:00.808895 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b9e2d243-07ca-4b99-a929-9ae3321c3274","Type":"ContainerStarted","Data":"1752c3f0f3659fccced8f53d9d73f8cefae607b55cbca4d076744659020c6381"} Sep 29 17:08:00 crc kubenswrapper[4592]: I0929 17:08:00.882910 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:08:00 crc kubenswrapper[4592]: I0929 17:08:00.882974 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.859996 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"59ac4af6-5ade-49f1-8098-52e823dcf61f","Type":"ContainerStarted","Data":"d6d3fb66cfd55ca4e3f1fd17188703d0febfd33394c21f4e58f5e88d0801f1e8"} Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.864202 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4c85a81f-2e67-4a6f-928b-d4735005cd43","Type":"ContainerStarted","Data":"a68e7539bfcd0e9e84437e62bdb78894a79007a6583d17fc5967a640b86d3d83"} Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.865804 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"1ed52369-92ab-4da4-a517-1555c79b0a38","Type":"ContainerStarted","Data":"283c6c27eb8cb882ddcd81a54dd770c59613e198db98b1e1b48c7a4b70e11bf8"} Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.866763 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"27cb9cb7-75ba-479b-ad1e-2e47beaefae4","Type":"ContainerStarted","Data":"5326605e8fbcd5a21eb61bfc445ac1cd609bfcad97dc8950b790451debaf91bf"} Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.867006 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.867562 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7x4wp" event={"ID":"fd373ead-845f-4c4d-b9d7-38f8424697d5","Type":"ContainerStarted","Data":"dbca2bc591fcd4d557316bd53e06b5e8bfa8630d90a499620d72a06c1ac2aab1"} Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.869383 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"1549c716-ca22-42ff-9cea-e63e50856936","Type":"ContainerStarted","Data":"32e4abf68a24dc45498856195330b6866f709294f5a308d603b0f6b12965b265"} Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.871505 4592 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b9e2d243-07ca-4b99-a929-9ae3321c3274","Type":"ContainerStarted","Data":"1ed6bf7f8a670f135736dc47a185e78f5ab2da65d9bb5b1329c9de004374fe6e"} Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.873736 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jfzwf" event={"ID":"d813cc31-c8ba-48c0-b523-3d2b3fbc3341","Type":"ContainerStarted","Data":"a777b82ecd889600f56087a3e3136c0ec56ac651c54f167da3cc39bd4ce37b8e"} Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.874023 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-jfzwf" Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.926881 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=18.927924613 podStartE2EDuration="26.926861403s" podCreationTimestamp="2025-09-29 17:07:41 +0000 UTC" firstStartedPulling="2025-09-29 17:07:57.63152885 +0000 UTC m=+1007.779306531" lastFinishedPulling="2025-09-29 17:08:05.63046565 +0000 UTC m=+1015.778243321" observedRunningTime="2025-09-29 17:08:07.924852186 +0000 UTC m=+1018.072629867" watchObservedRunningTime="2025-09-29 17:08:07.926861403 +0000 UTC m=+1018.074639084" Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.949779 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-jfzwf" podStartSLOduration=11.811036677 podStartE2EDuration="20.949758281s" podCreationTimestamp="2025-09-29 17:07:47 +0000 UTC" firstStartedPulling="2025-09-29 17:07:58.006073308 +0000 UTC m=+1008.153850989" lastFinishedPulling="2025-09-29 17:08:07.144794902 +0000 UTC m=+1017.292572593" observedRunningTime="2025-09-29 17:08:07.949489903 +0000 UTC m=+1018.097267584" watchObservedRunningTime="2025-09-29 17:08:07.949758281 +0000 UTC m=+1018.097535962" Sep 29 17:08:07 crc kubenswrapper[4592]: I0929 17:08:07.971820 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=15.422880273 podStartE2EDuration="24.971802205s" podCreationTimestamp="2025-09-29 17:07:43 +0000 UTC" firstStartedPulling="2025-09-29 17:07:57.789189805 +0000 UTC m=+1007.936967486" lastFinishedPulling="2025-09-29 17:08:07.338111737 +0000 UTC m=+1017.485889418" observedRunningTime="2025-09-29 17:08:07.970306463 +0000 UTC m=+1018.118084144" watchObservedRunningTime="2025-09-29 17:08:07.971802205 +0000 UTC m=+1018.119579886" Sep 29 17:08:08 crc kubenswrapper[4592]: I0929 17:08:08.881171 4592 generic.go:334] "Generic (PLEG): container finished" podID="fd373ead-845f-4c4d-b9d7-38f8424697d5" containerID="dbca2bc591fcd4d557316bd53e06b5e8bfa8630d90a499620d72a06c1ac2aab1" exitCode=0 Sep 29 17:08:08 crc kubenswrapper[4592]: I0929 17:08:08.881253 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7x4wp" event={"ID":"fd373ead-845f-4c4d-b9d7-38f8424697d5","Type":"ContainerDied","Data":"dbca2bc591fcd4d557316bd53e06b5e8bfa8630d90a499620d72a06c1ac2aab1"} Sep 29 17:08:08 crc kubenswrapper[4592]: I0929 17:08:08.882474 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Sep 29 17:08:10 crc kubenswrapper[4592]: I0929 17:08:10.847916 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-8xpsr"] Sep 29 17:08:10 crc kubenswrapper[4592]: I0929 17:08:10.849588 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:10 crc kubenswrapper[4592]: I0929 17:08:10.855921 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 29 17:08:10 crc kubenswrapper[4592]: I0929 17:08:10.864935 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-8xpsr"] Sep 29 17:08:10 crc kubenswrapper[4592]: I0929 17:08:10.960530 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/7132c9e8-ff15-414a-b384-4a266f3c84f8-ovn-rundir\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:10 crc kubenswrapper[4592]: I0929 17:08:10.960606 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/7132c9e8-ff15-414a-b384-4a266f3c84f8-ovs-rundir\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:10 crc kubenswrapper[4592]: I0929 17:08:10.960634 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5j45\" (UniqueName: \"kubernetes.io/projected/7132c9e8-ff15-414a-b384-4a266f3c84f8-kube-api-access-n5j45\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:10 crc kubenswrapper[4592]: I0929 17:08:10.960714 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7132c9e8-ff15-414a-b384-4a266f3c84f8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:10 crc kubenswrapper[4592]: I0929 17:08:10.960743 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7132c9e8-ff15-414a-b384-4a266f3c84f8-config\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:10 crc kubenswrapper[4592]: I0929 17:08:10.960813 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7132c9e8-ff15-414a-b384-4a266f3c84f8-combined-ca-bundle\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.015049 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-cxttg"] Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.052376 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-vmx8w"] Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.054342 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.056371 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.062409 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/7132c9e8-ff15-414a-b384-4a266f3c84f8-ovn-rundir\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.062487 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/7132c9e8-ff15-414a-b384-4a266f3c84f8-ovs-rundir\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.062515 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5j45\" (UniqueName: \"kubernetes.io/projected/7132c9e8-ff15-414a-b384-4a266f3c84f8-kube-api-access-n5j45\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.062586 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7132c9e8-ff15-414a-b384-4a266f3c84f8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.062612 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7132c9e8-ff15-414a-b384-4a266f3c84f8-config\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.062666 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7132c9e8-ff15-414a-b384-4a266f3c84f8-combined-ca-bundle\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.062908 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/7132c9e8-ff15-414a-b384-4a266f3c84f8-ovn-rundir\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.063394 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/7132c9e8-ff15-414a-b384-4a266f3c84f8-ovs-rundir\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.065676 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.069554 
4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7132c9e8-ff15-414a-b384-4a266f3c84f8-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.076608 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7132c9e8-ff15-414a-b384-4a266f3c84f8-config\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.076723 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7132c9e8-ff15-414a-b384-4a266f3c84f8-combined-ca-bundle\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.117266 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-vmx8w"] Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.163881 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hkz2\" (UniqueName: \"kubernetes.io/projected/3f2500ec-e138-4ef6-a684-0400fc77e822-kube-api-access-9hkz2\") pod \"dnsmasq-dns-5bf47b49b7-vmx8w\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.163929 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-vmx8w\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.163952 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-vmx8w\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.164042 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-config\") pod \"dnsmasq-dns-5bf47b49b7-vmx8w\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.166006 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5j45\" (UniqueName: \"kubernetes.io/projected/7132c9e8-ff15-414a-b384-4a266f3c84f8-kube-api-access-n5j45\") pod \"ovn-controller-metrics-8xpsr\" (UID: \"7132c9e8-ff15-414a-b384-4a266f3c84f8\") " pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.172061 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-8xpsr" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.266080 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hkz2\" (UniqueName: \"kubernetes.io/projected/3f2500ec-e138-4ef6-a684-0400fc77e822-kube-api-access-9hkz2\") pod \"dnsmasq-dns-5bf47b49b7-vmx8w\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.266137 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-vmx8w\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.266207 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-vmx8w\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.266299 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-config\") pod \"dnsmasq-dns-5bf47b49b7-vmx8w\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.267050 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-config\") pod \"dnsmasq-dns-5bf47b49b7-vmx8w\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.268610 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-vmx8w\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.270518 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-vmx8w\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.284539 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qq7dq"] Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.301581 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-5w9kn"] Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.303268 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.306093 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hkz2\" (UniqueName: \"kubernetes.io/projected/3f2500ec-e138-4ef6-a684-0400fc77e822-kube-api-access-9hkz2\") pod \"dnsmasq-dns-5bf47b49b7-vmx8w\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.306219 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.329371 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-5w9kn"] Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.370196 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-dns-svc\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.370247 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.370308 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.370354 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js9kz\" (UniqueName: \"kubernetes.io/projected/50aa0c99-1d2e-4d25-8538-f7561e08fe27-kube-api-access-js9kz\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.370408 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-config\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.378082 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.471981 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-dns-svc\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.472023 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.472096 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.472184 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js9kz\" (UniqueName: \"kubernetes.io/projected/50aa0c99-1d2e-4d25-8538-f7561e08fe27-kube-api-access-js9kz\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.472240 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-config\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.473086 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-config\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.473793 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.474609 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.474707 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-dns-svc\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 
17:08:11.497738 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js9kz\" (UniqueName: \"kubernetes.io/projected/50aa0c99-1d2e-4d25-8538-f7561e08fe27-kube-api-access-js9kz\") pod \"dnsmasq-dns-8554648995-5w9kn\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.666669 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.808242 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-8xpsr"] Sep 29 17:08:11 crc kubenswrapper[4592]: I0929 17:08:11.904986 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-vmx8w"] Sep 29 17:08:12 crc kubenswrapper[4592]: W0929 17:08:12.001808 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7132c9e8_ff15_414a_b384_4a266f3c84f8.slice/crio-1cd58b7801588dccdc1ab07ec330ca143ff7f50048c904b60f57e7c47d3f18ae WatchSource:0}: Error finding container 1cd58b7801588dccdc1ab07ec330ca143ff7f50048c904b60f57e7c47d3f18ae: Status 404 returned error can't find the container with id 1cd58b7801588dccdc1ab07ec330ca143ff7f50048c904b60f57e7c47d3f18ae Sep 29 17:08:12 crc kubenswrapper[4592]: W0929 17:08:12.003948 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f2500ec_e138_4ef6_a684_0400fc77e822.slice/crio-7f316b242772dfa14821057d10ec298668293386b046ee8e69552eea7f2f3d46 WatchSource:0}: Error finding container 7f316b242772dfa14821057d10ec298668293386b046ee8e69552eea7f2f3d46: Status 404 returned error can't find the container with id 7f316b242772dfa14821057d10ec298668293386b046ee8e69552eea7f2f3d46 Sep 29 17:08:12 crc kubenswrapper[4592]: I0929 17:08:12.410268 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-5w9kn"] Sep 29 17:08:12 crc kubenswrapper[4592]: W0929 17:08:12.416979 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50aa0c99_1d2e_4d25_8538_f7561e08fe27.slice/crio-a8db0f10bff98a0b91a7959f4b35925bba1cc2e47b7b67882760e1f3fef57f53 WatchSource:0}: Error finding container a8db0f10bff98a0b91a7959f4b35925bba1cc2e47b7b67882760e1f3fef57f53: Status 404 returned error can't find the container with id a8db0f10bff98a0b91a7959f4b35925bba1cc2e47b7b67882760e1f3fef57f53 Sep 29 17:08:12 crc kubenswrapper[4592]: I0929 17:08:12.936424 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-8xpsr" event={"ID":"7132c9e8-ff15-414a-b384-4a266f3c84f8","Type":"ContainerStarted","Data":"1cd58b7801588dccdc1ab07ec330ca143ff7f50048c904b60f57e7c47d3f18ae"} Sep 29 17:08:12 crc kubenswrapper[4592]: I0929 17:08:12.937448 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" event={"ID":"3f2500ec-e138-4ef6-a684-0400fc77e822","Type":"ContainerStarted","Data":"7f316b242772dfa14821057d10ec298668293386b046ee8e69552eea7f2f3d46"} Sep 29 17:08:12 crc kubenswrapper[4592]: I0929 17:08:12.938420 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-5w9kn" 
event={"ID":"50aa0c99-1d2e-4d25-8538-f7561e08fe27","Type":"ContainerStarted","Data":"a8db0f10bff98a0b91a7959f4b35925bba1cc2e47b7b67882760e1f3fef57f53"} Sep 29 17:08:13 crc kubenswrapper[4592]: I0929 17:08:13.945848 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7x4wp" event={"ID":"fd373ead-845f-4c4d-b9d7-38f8424697d5","Type":"ContainerStarted","Data":"3ca14432c123b9a433217e0bcd1c6d4877833440be13e44ede6825e356671aeb"} Sep 29 17:08:13 crc kubenswrapper[4592]: I0929 17:08:13.963439 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 17:08:15 crc kubenswrapper[4592]: I0929 17:08:15.965955 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7x4wp" event={"ID":"fd373ead-845f-4c4d-b9d7-38f8424697d5","Type":"ContainerStarted","Data":"5b27514e9e7aa43736278782ec5c9180cd61add9eef0caa442ebe4715740f843"} Sep 29 17:08:15 crc kubenswrapper[4592]: I0929 17:08:15.966548 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:08:15 crc kubenswrapper[4592]: I0929 17:08:15.968097 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq" event={"ID":"436f5852-e2a7-4374-ad72-e9f4c63a046b","Type":"ContainerStarted","Data":"da2173af44e65c7f2069695997b226dab28186a7e2d08bb2cfb1646997a31cd2"} Sep 29 17:08:15 crc kubenswrapper[4592]: I0929 17:08:15.968183 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq" podUID="436f5852-e2a7-4374-ad72-e9f4c63a046b" containerName="init" containerID="cri-o://da2173af44e65c7f2069695997b226dab28186a7e2d08bb2cfb1646997a31cd2" gracePeriod=10 Sep 29 17:08:15 crc kubenswrapper[4592]: I0929 17:08:15.972579 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" event={"ID":"3f2500ec-e138-4ef6-a684-0400fc77e822","Type":"ContainerStarted","Data":"4b8e4115fda1e0fd28559ce11dea38480f346f5edee0b3195a3f56733a31b652"} Sep 29 17:08:16 crc kubenswrapper[4592]: I0929 17:08:16.002012 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-7x4wp" podStartSLOduration=21.581643276 podStartE2EDuration="29.001992859s" podCreationTimestamp="2025-09-29 17:07:47 +0000 UTC" firstStartedPulling="2025-09-29 17:07:59.34105981 +0000 UTC m=+1009.488837491" lastFinishedPulling="2025-09-29 17:08:06.761409393 +0000 UTC m=+1016.909187074" observedRunningTime="2025-09-29 17:08:15.990690719 +0000 UTC m=+1026.138468400" watchObservedRunningTime="2025-09-29 17:08:16.001992859 +0000 UTC m=+1026.149770540" Sep 29 17:08:16 crc kubenswrapper[4592]: I0929 17:08:16.701216 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Sep 29 17:08:16 crc kubenswrapper[4592]: I0929 17:08:16.980080 4592 generic.go:334] "Generic (PLEG): container finished" podID="436f5852-e2a7-4374-ad72-e9f4c63a046b" containerID="da2173af44e65c7f2069695997b226dab28186a7e2d08bb2cfb1646997a31cd2" exitCode=5 Sep 29 17:08:16 crc kubenswrapper[4592]: I0929 17:08:16.980138 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq" event={"ID":"436f5852-e2a7-4374-ad72-e9f4c63a046b","Type":"ContainerDied","Data":"da2173af44e65c7f2069695997b226dab28186a7e2d08bb2cfb1646997a31cd2"} Sep 29 17:08:16 crc kubenswrapper[4592]: I0929 17:08:16.982084 4592 generic.go:334] "Generic 
(PLEG): container finished" podID="3f2500ec-e138-4ef6-a684-0400fc77e822" containerID="4b8e4115fda1e0fd28559ce11dea38480f346f5edee0b3195a3f56733a31b652" exitCode=0 Sep 29 17:08:16 crc kubenswrapper[4592]: I0929 17:08:16.983388 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" event={"ID":"3f2500ec-e138-4ef6-a684-0400fc77e822","Type":"ContainerDied","Data":"4b8e4115fda1e0fd28559ce11dea38480f346f5edee0b3195a3f56733a31b652"} Sep 29 17:08:16 crc kubenswrapper[4592]: I0929 17:08:16.983419 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:08:18 crc kubenswrapper[4592]: I0929 17:08:18.546596 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq" Sep 29 17:08:18 crc kubenswrapper[4592]: I0929 17:08:18.593855 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/436f5852-e2a7-4374-ad72-e9f4c63a046b-config\") pod \"436f5852-e2a7-4374-ad72-e9f4c63a046b\" (UID: \"436f5852-e2a7-4374-ad72-e9f4c63a046b\") " Sep 29 17:08:18 crc kubenswrapper[4592]: I0929 17:08:18.593982 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/436f5852-e2a7-4374-ad72-e9f4c63a046b-dns-svc\") pod \"436f5852-e2a7-4374-ad72-e9f4c63a046b\" (UID: \"436f5852-e2a7-4374-ad72-e9f4c63a046b\") " Sep 29 17:08:18 crc kubenswrapper[4592]: I0929 17:08:18.594254 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qth8w\" (UniqueName: \"kubernetes.io/projected/436f5852-e2a7-4374-ad72-e9f4c63a046b-kube-api-access-qth8w\") pod \"436f5852-e2a7-4374-ad72-e9f4c63a046b\" (UID: \"436f5852-e2a7-4374-ad72-e9f4c63a046b\") " Sep 29 17:08:18 crc kubenswrapper[4592]: I0929 17:08:18.598127 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/436f5852-e2a7-4374-ad72-e9f4c63a046b-kube-api-access-qth8w" (OuterVolumeSpecName: "kube-api-access-qth8w") pod "436f5852-e2a7-4374-ad72-e9f4c63a046b" (UID: "436f5852-e2a7-4374-ad72-e9f4c63a046b"). InnerVolumeSpecName "kube-api-access-qth8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:18 crc kubenswrapper[4592]: I0929 17:08:18.626202 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/436f5852-e2a7-4374-ad72-e9f4c63a046b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "436f5852-e2a7-4374-ad72-e9f4c63a046b" (UID: "436f5852-e2a7-4374-ad72-e9f4c63a046b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:18 crc kubenswrapper[4592]: I0929 17:08:18.628732 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/436f5852-e2a7-4374-ad72-e9f4c63a046b-config" (OuterVolumeSpecName: "config") pod "436f5852-e2a7-4374-ad72-e9f4c63a046b" (UID: "436f5852-e2a7-4374-ad72-e9f4c63a046b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:18 crc kubenswrapper[4592]: I0929 17:08:18.697255 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qth8w\" (UniqueName: \"kubernetes.io/projected/436f5852-e2a7-4374-ad72-e9f4c63a046b-kube-api-access-qth8w\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:18 crc kubenswrapper[4592]: I0929 17:08:18.697639 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/436f5852-e2a7-4374-ad72-e9f4c63a046b-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:18 crc kubenswrapper[4592]: I0929 17:08:18.697653 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/436f5852-e2a7-4374-ad72-e9f4c63a046b-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:18 crc kubenswrapper[4592]: I0929 17:08:18.998060 4592 generic.go:334] "Generic (PLEG): container finished" podID="5cdeea8f-847e-49a2-8f8f-2d04429192c2" containerID="c1184bf57440ef5ce29f4912ae208662cd9308d8958f622db2a9097d924ebd67" exitCode=0 Sep 29 17:08:18 crc kubenswrapper[4592]: I0929 17:08:18.998111 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-cxttg" event={"ID":"5cdeea8f-847e-49a2-8f8f-2d04429192c2","Type":"ContainerDied","Data":"c1184bf57440ef5ce29f4912ae208662cd9308d8958f622db2a9097d924ebd67"} Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.000176 4592 generic.go:334] "Generic (PLEG): container finished" podID="4c85a81f-2e67-4a6f-928b-d4735005cd43" containerID="a68e7539bfcd0e9e84437e62bdb78894a79007a6583d17fc5967a640b86d3d83" exitCode=0 Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.000261 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4c85a81f-2e67-4a6f-928b-d4735005cd43","Type":"ContainerDied","Data":"a68e7539bfcd0e9e84437e62bdb78894a79007a6583d17fc5967a640b86d3d83"} Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.004277 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"1549c716-ca22-42ff-9cea-e63e50856936","Type":"ContainerStarted","Data":"1e4f42907f2a994cb94b1efa049ae7d1443bbebc49cae1c4419246357ac2b064"} Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.006217 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"b9e2d243-07ca-4b99-a929-9ae3321c3274","Type":"ContainerStarted","Data":"4b89a650b62e038c9141882bda0c9d2df79088e7bb8e5885c674dbf44966fb36"} Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.012350 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" event={"ID":"3f2500ec-e138-4ef6-a684-0400fc77e822","Type":"ContainerStarted","Data":"39ac62e091dc9ad109fe54b362e5e7ad6bb8f0bce750236177764bead5b52157"} Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.012432 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.018393 4592 generic.go:334] "Generic (PLEG): container finished" podID="59ac4af6-5ade-49f1-8098-52e823dcf61f" containerID="d6d3fb66cfd55ca4e3f1fd17188703d0febfd33394c21f4e58f5e88d0801f1e8" exitCode=0 Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.018529 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" 
event={"ID":"59ac4af6-5ade-49f1-8098-52e823dcf61f","Type":"ContainerDied","Data":"d6d3fb66cfd55ca4e3f1fd17188703d0febfd33394c21f4e58f5e88d0801f1e8"} Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.026015 4592 generic.go:334] "Generic (PLEG): container finished" podID="50aa0c99-1d2e-4d25-8538-f7561e08fe27" containerID="6447e1c9c3751f2419e73e4df1acbef19fb7ec95ab4365d1fae17df0779d1503" exitCode=0 Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.026504 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-5w9kn" event={"ID":"50aa0c99-1d2e-4d25-8538-f7561e08fe27","Type":"ContainerDied","Data":"6447e1c9c3751f2419e73e4df1acbef19fb7ec95ab4365d1fae17df0779d1503"} Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.045236 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-8xpsr" event={"ID":"7132c9e8-ff15-414a-b384-4a266f3c84f8","Type":"ContainerStarted","Data":"fb90c1f41fdc44019dc9c7f7f104cd674c5726a039fcd5b7ed4c9cafda0dce81"} Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.057340 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=9.751706447 podStartE2EDuration="30.057310018s" podCreationTimestamp="2025-09-29 17:07:49 +0000 UTC" firstStartedPulling="2025-09-29 17:07:58.295879776 +0000 UTC m=+1008.443657457" lastFinishedPulling="2025-09-29 17:08:18.601483347 +0000 UTC m=+1028.749261028" observedRunningTime="2025-09-29 17:08:19.037710383 +0000 UTC m=+1029.185488084" watchObservedRunningTime="2025-09-29 17:08:19.057310018 +0000 UTC m=+1029.205087709" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.058368 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq" event={"ID":"436f5852-e2a7-4374-ad72-e9f4c63a046b","Type":"ContainerDied","Data":"7448b54708981054d894e8bbe7e384e269edcc8cd03e6ee096e58a98449e21c0"} Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.058434 4592 scope.go:117] "RemoveContainer" containerID="da2173af44e65c7f2069695997b226dab28186a7e2d08bb2cfb1646997a31cd2" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.058614 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.099815 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=13.861669108 podStartE2EDuration="32.099802431s" podCreationTimestamp="2025-09-29 17:07:47 +0000 UTC" firstStartedPulling="2025-09-29 17:08:00.338946804 +0000 UTC m=+1010.486724485" lastFinishedPulling="2025-09-29 17:08:18.577080127 +0000 UTC m=+1028.724857808" observedRunningTime="2025-09-29 17:08:19.091698592 +0000 UTC m=+1029.239476273" watchObservedRunningTime="2025-09-29 17:08:19.099802431 +0000 UTC m=+1029.247580112" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.106052 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.106228 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.123862 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" podStartSLOduration=4.430570235 podStartE2EDuration="8.123842343s" podCreationTimestamp="2025-09-29 17:08:11 +0000 UTC" firstStartedPulling="2025-09-29 17:08:12.006674477 +0000 UTC m=+1022.154452168" lastFinishedPulling="2025-09-29 17:08:15.699946595 +0000 UTC m=+1025.847724276" observedRunningTime="2025-09-29 17:08:19.118880332 +0000 UTC m=+1029.266658013" watchObservedRunningTime="2025-09-29 17:08:19.123842343 +0000 UTC m=+1029.271620024" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.244747 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.308954 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-8xpsr" podStartSLOduration=2.790039224 podStartE2EDuration="9.308927545s" podCreationTimestamp="2025-09-29 17:08:10 +0000 UTC" firstStartedPulling="2025-09-29 17:08:12.006666516 +0000 UTC m=+1022.154444217" lastFinishedPulling="2025-09-29 17:08:18.525554857 +0000 UTC m=+1028.673332538" observedRunningTime="2025-09-29 17:08:19.27767298 +0000 UTC m=+1029.425450661" watchObservedRunningTime="2025-09-29 17:08:19.308927545 +0000 UTC m=+1029.456705216" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.348273 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-cxttg" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.513421 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgn6p\" (UniqueName: \"kubernetes.io/projected/5cdeea8f-847e-49a2-8f8f-2d04429192c2-kube-api-access-cgn6p\") pod \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\" (UID: \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\") " Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.513495 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cdeea8f-847e-49a2-8f8f-2d04429192c2-dns-svc\") pod \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\" (UID: \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\") " Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.513533 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cdeea8f-847e-49a2-8f8f-2d04429192c2-config\") pod \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\" (UID: \"5cdeea8f-847e-49a2-8f8f-2d04429192c2\") " Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.517707 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cdeea8f-847e-49a2-8f8f-2d04429192c2-kube-api-access-cgn6p" (OuterVolumeSpecName: "kube-api-access-cgn6p") pod "5cdeea8f-847e-49a2-8f8f-2d04429192c2" (UID: "5cdeea8f-847e-49a2-8f8f-2d04429192c2"). InnerVolumeSpecName "kube-api-access-cgn6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.533100 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cdeea8f-847e-49a2-8f8f-2d04429192c2-config" (OuterVolumeSpecName: "config") pod "5cdeea8f-847e-49a2-8f8f-2d04429192c2" (UID: "5cdeea8f-847e-49a2-8f8f-2d04429192c2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.533635 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cdeea8f-847e-49a2-8f8f-2d04429192c2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5cdeea8f-847e-49a2-8f8f-2d04429192c2" (UID: "5cdeea8f-847e-49a2-8f8f-2d04429192c2"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.615750 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgn6p\" (UniqueName: \"kubernetes.io/projected/5cdeea8f-847e-49a2-8f8f-2d04429192c2-kube-api-access-cgn6p\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.615806 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5cdeea8f-847e-49a2-8f8f-2d04429192c2-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:19 crc kubenswrapper[4592]: I0929 17:08:19.615815 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cdeea8f-847e-49a2-8f8f-2d04429192c2-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.069268 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"59ac4af6-5ade-49f1-8098-52e823dcf61f","Type":"ContainerStarted","Data":"dc74b49432c71c8e16a53101496edbcaa9e33ac3ddc597f11a36afe436d1a9ed"} Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.072018 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-5w9kn" event={"ID":"50aa0c99-1d2e-4d25-8538-f7561e08fe27","Type":"ContainerStarted","Data":"63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b"} Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.072257 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.073327 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-cxttg" event={"ID":"5cdeea8f-847e-49a2-8f8f-2d04429192c2","Type":"ContainerDied","Data":"955bdd8a785f51a7c47eef4d45a5f82f5ebcae02147038cbae7a91c798485c29"} Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.073363 4592 scope.go:117] "RemoveContainer" containerID="c1184bf57440ef5ce29f4912ae208662cd9308d8958f622db2a9097d924ebd67" Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.073367 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-cxttg" Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.075857 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"4c85a81f-2e67-4a6f-928b-d4735005cd43","Type":"ContainerStarted","Data":"3a2de40282e65f2b3e868839592d82d4aba04fd2cbe7f9d2af33db713f4251ae"} Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.096934 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=31.6411615 podStartE2EDuration="41.096916334s" podCreationTimestamp="2025-09-29 17:07:39 +0000 UTC" firstStartedPulling="2025-09-29 17:07:57.688972536 +0000 UTC m=+1007.836750217" lastFinishedPulling="2025-09-29 17:08:07.14472737 +0000 UTC m=+1017.292505051" observedRunningTime="2025-09-29 17:08:20.093630761 +0000 UTC m=+1030.241408462" watchObservedRunningTime="2025-09-29 17:08:20.096916334 +0000 UTC m=+1030.244694015" Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.122375 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=32.121556157 podStartE2EDuration="41.122357434s" podCreationTimestamp="2025-09-29 17:07:39 +0000 UTC" firstStartedPulling="2025-09-29 17:07:57.760627086 +0000 UTC m=+1007.908404767" lastFinishedPulling="2025-09-29 17:08:06.761428363 +0000 UTC m=+1016.909206044" observedRunningTime="2025-09-29 17:08:20.114995516 +0000 UTC m=+1030.262773237" watchObservedRunningTime="2025-09-29 17:08:20.122357434 +0000 UTC m=+1030.270135115" Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.123905 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.141342 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-5w9kn" podStartSLOduration=3.105839312 podStartE2EDuration="9.141317371s" podCreationTimestamp="2025-09-29 17:08:11 +0000 UTC" firstStartedPulling="2025-09-29 17:08:12.418932183 +0000 UTC m=+1022.566709864" lastFinishedPulling="2025-09-29 17:08:18.454410242 +0000 UTC m=+1028.602187923" observedRunningTime="2025-09-29 17:08:20.134659643 +0000 UTC m=+1030.282437344" watchObservedRunningTime="2025-09-29 17:08:20.141317371 +0000 UTC m=+1030.289095052" Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.221364 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-cxttg"] Sep 29 17:08:20 crc kubenswrapper[4592]: I0929 17:08:20.233437 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-cxttg"] Sep 29 17:08:21 crc kubenswrapper[4592]: I0929 17:08:21.136592 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Sep 29 17:08:21 crc kubenswrapper[4592]: I0929 17:08:21.136997 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Sep 29 17:08:21 crc kubenswrapper[4592]: I0929 17:08:21.205083 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cdeea8f-847e-49a2-8f8f-2d04429192c2" path="/var/lib/kubelet/pods/5cdeea8f-847e-49a2-8f8f-2d04429192c2/volumes" Sep 29 17:08:21 crc kubenswrapper[4592]: I0929 17:08:21.226421 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Sep 29 17:08:21 crc kubenswrapper[4592]: I0929 
17:08:21.226479 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Sep 29 17:08:21 crc kubenswrapper[4592]: I0929 17:08:21.263549 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Sep 29 17:08:21 crc kubenswrapper[4592]: I0929 17:08:21.498815 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Sep 29 17:08:21 crc kubenswrapper[4592]: I0929 17:08:21.498921 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.136083 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.306077 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Sep 29 17:08:22 crc kubenswrapper[4592]: E0929 17:08:22.306507 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cdeea8f-847e-49a2-8f8f-2d04429192c2" containerName="init" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.306524 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cdeea8f-847e-49a2-8f8f-2d04429192c2" containerName="init" Sep 29 17:08:22 crc kubenswrapper[4592]: E0929 17:08:22.306561 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="436f5852-e2a7-4374-ad72-e9f4c63a046b" containerName="init" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.306569 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="436f5852-e2a7-4374-ad72-e9f4c63a046b" containerName="init" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.306757 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cdeea8f-847e-49a2-8f8f-2d04429192c2" containerName="init" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.306772 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="436f5852-e2a7-4374-ad72-e9f4c63a046b" containerName="init" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.307802 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.312131 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.313815 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.315005 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.315567 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.321098 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-tzdwr" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.361375 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bd365a0-dba3-4f81-a229-a344e01a6eca-scripts\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.361425 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bd365a0-dba3-4f81-a229-a344e01a6eca-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.361472 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bd365a0-dba3-4f81-a229-a344e01a6eca-config\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.361487 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bd365a0-dba3-4f81-a229-a344e01a6eca-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.361517 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bd365a0-dba3-4f81-a229-a344e01a6eca-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.361561 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8bd365a0-dba3-4f81-a229-a344e01a6eca-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0" Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.361595 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75bc6\" (UniqueName: \"kubernetes.io/projected/8bd365a0-dba3-4f81-a229-a344e01a6eca-kube-api-access-75bc6\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0" Sep 29 17:08:22 crc kubenswrapper[4592]: 
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.462975 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8bd365a0-dba3-4f81-a229-a344e01a6eca-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.463065 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75bc6\" (UniqueName: \"kubernetes.io/projected/8bd365a0-dba3-4f81-a229-a344e01a6eca-kube-api-access-75bc6\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.463112 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bd365a0-dba3-4f81-a229-a344e01a6eca-scripts\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.463161 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bd365a0-dba3-4f81-a229-a344e01a6eca-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.463214 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bd365a0-dba3-4f81-a229-a344e01a6eca-config\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.463235 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bd365a0-dba3-4f81-a229-a344e01a6eca-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.463519 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bd365a0-dba3-4f81-a229-a344e01a6eca-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.463820 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8bd365a0-dba3-4f81-a229-a344e01a6eca-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.464696 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bd365a0-dba3-4f81-a229-a344e01a6eca-config\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.464728 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bd365a0-dba3-4f81-a229-a344e01a6eca-scripts\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.470969 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bd365a0-dba3-4f81-a229-a344e01a6eca-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.471045 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bd365a0-dba3-4f81-a229-a344e01a6eca-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.471497 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8bd365a0-dba3-4f81-a229-a344e01a6eca-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.485780 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75bc6\" (UniqueName: \"kubernetes.io/projected/8bd365a0-dba3-4f81-a229-a344e01a6eca-kube-api-access-75bc6\") pod \"ovn-northd-0\" (UID: \"8bd365a0-dba3-4f81-a229-a344e01a6eca\") " pod="openstack/ovn-northd-0"
Sep 29 17:08:22 crc kubenswrapper[4592]: I0929 17:08:22.639126 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Sep 29 17:08:23 crc kubenswrapper[4592]: I0929 17:08:23.087324 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.019718 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-vmx8w"]
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.019960 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" podUID="3f2500ec-e138-4ef6-a684-0400fc77e822" containerName="dnsmasq-dns" containerID="cri-o://39ac62e091dc9ad109fe54b362e5e7ad6bb8f0bce750236177764bead5b52157" gracePeriod=10
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.027446 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.140039 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zwcph"]
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.153338 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.174311 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8bd365a0-dba3-4f81-a229-a344e01a6eca","Type":"ContainerStarted","Data":"1db5acb0fdb43bc5bdd6089b97c8ff5049f4b4c730655b2643cef423483297fb"}
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.223119 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zwcph"]
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.294209 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.294281 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkjnm\" (UniqueName: \"kubernetes.io/projected/d2c2f732-4572-4064-9379-b627d76b87fd-kube-api-access-tkjnm\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.294319 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.294435 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-config\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.294489 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.395322 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.395372 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.395406 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkjnm\" (UniqueName: \"kubernetes.io/projected/d2c2f732-4572-4064-9379-b627d76b87fd-kube-api-access-tkjnm\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.395436 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.395494 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-config\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.396592 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-config\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.397098 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.397585 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.398300 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.438023 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkjnm\" (UniqueName: \"kubernetes.io/projected/d2c2f732-4572-4064-9379-b627d76b87fd-kube-api-access-tkjnm\") pod \"dnsmasq-dns-b8fbc5445-zwcph\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Sep 29 17:08:24 crc kubenswrapper[4592]: I0929 17:08:24.564819 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph"
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" Sep 29 17:08:24 crc kubenswrapper[4592]: E0929 17:08:24.590352 4592 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f2500ec_e138_4ef6_a684_0400fc77e822.slice/crio-39ac62e091dc9ad109fe54b362e5e7ad6bb8f0bce750236177764bead5b52157.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f2500ec_e138_4ef6_a684_0400fc77e822.slice/crio-conmon-39ac62e091dc9ad109fe54b362e5e7ad6bb8f0bce750236177764bead5b52157.scope\": RecentStats: unable to find data in memory cache]" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.093236 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zwcph"] Sep 29 17:08:25 crc kubenswrapper[4592]: W0929 17:08:25.151811 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2c2f732_4572_4064_9379_b627d76b87fd.slice/crio-3920c45f47fb2e4281e2caddefd169f443d4aeb8300bb4c3b0a408784e49036d WatchSource:0}: Error finding container 3920c45f47fb2e4281e2caddefd169f443d4aeb8300bb4c3b0a408784e49036d: Status 404 returned error can't find the container with id 3920c45f47fb2e4281e2caddefd169f443d4aeb8300bb4c3b0a408784e49036d Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.205308 4592 generic.go:334] "Generic (PLEG): container finished" podID="3f2500ec-e138-4ef6-a684-0400fc77e822" containerID="39ac62e091dc9ad109fe54b362e5e7ad6bb8f0bce750236177764bead5b52157" exitCode=0 Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.205659 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" event={"ID":"3f2500ec-e138-4ef6-a684-0400fc77e822","Type":"ContainerDied","Data":"39ac62e091dc9ad109fe54b362e5e7ad6bb8f0bce750236177764bead5b52157"} Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.206450 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" event={"ID":"3f2500ec-e138-4ef6-a684-0400fc77e822","Type":"ContainerDied","Data":"7f316b242772dfa14821057d10ec298668293386b046ee8e69552eea7f2f3d46"} Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.206476 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f316b242772dfa14821057d10ec298668293386b046ee8e69552eea7f2f3d46" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.208898 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" event={"ID":"d2c2f732-4572-4064-9379-b627d76b87fd","Type":"ContainerStarted","Data":"3920c45f47fb2e4281e2caddefd169f443d4aeb8300bb4c3b0a408784e49036d"} Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.241691 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.364859 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Sep 29 17:08:25 crc kubenswrapper[4592]: E0929 17:08:25.365208 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f2500ec-e138-4ef6-a684-0400fc77e822" containerName="dnsmasq-dns" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.365224 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f2500ec-e138-4ef6-a684-0400fc77e822" containerName="dnsmasq-dns" Sep 29 17:08:25 crc kubenswrapper[4592]: E0929 17:08:25.365241 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f2500ec-e138-4ef6-a684-0400fc77e822" containerName="init" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.365248 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f2500ec-e138-4ef6-a684-0400fc77e822" containerName="init" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.365466 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f2500ec-e138-4ef6-a684-0400fc77e822" containerName="dnsmasq-dns" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.372795 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.380377 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.383855 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.384202 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-lxhtz" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.384241 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.392643 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.415211 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-ovsdbserver-nb\") pod \"3f2500ec-e138-4ef6-a684-0400fc77e822\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.415562 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-config\") pod \"3f2500ec-e138-4ef6-a684-0400fc77e822\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.416274 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hkz2\" (UniqueName: \"kubernetes.io/projected/3f2500ec-e138-4ef6-a684-0400fc77e822-kube-api-access-9hkz2\") pod \"3f2500ec-e138-4ef6-a684-0400fc77e822\" (UID: \"3f2500ec-e138-4ef6-a684-0400fc77e822\") " Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.416438 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-dns-svc\") pod \"3f2500ec-e138-4ef6-a684-0400fc77e822\" (UID: 
\"3f2500ec-e138-4ef6-a684-0400fc77e822\") " Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.423969 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f2500ec-e138-4ef6-a684-0400fc77e822-kube-api-access-9hkz2" (OuterVolumeSpecName: "kube-api-access-9hkz2") pod "3f2500ec-e138-4ef6-a684-0400fc77e822" (UID: "3f2500ec-e138-4ef6-a684-0400fc77e822"). InnerVolumeSpecName "kube-api-access-9hkz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.522397 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.522754 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.522800 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/3f64e72d-c39e-45fa-b3df-ae8624976e86-lock\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.522874 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/3f64e72d-c39e-45fa-b3df-ae8624976e86-cache\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.522899 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kl86g\" (UniqueName: \"kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-kube-api-access-kl86g\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.522967 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hkz2\" (UniqueName: \"kubernetes.io/projected/3f2500ec-e138-4ef6-a684-0400fc77e822-kube-api-access-9hkz2\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.529605 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-config" (OuterVolumeSpecName: "config") pod "3f2500ec-e138-4ef6-a684-0400fc77e822" (UID: "3f2500ec-e138-4ef6-a684-0400fc77e822"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.531179 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3f2500ec-e138-4ef6-a684-0400fc77e822" (UID: "3f2500ec-e138-4ef6-a684-0400fc77e822"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.538828 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3f2500ec-e138-4ef6-a684-0400fc77e822" (UID: "3f2500ec-e138-4ef6-a684-0400fc77e822"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.624488 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.624846 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/3f64e72d-c39e-45fa-b3df-ae8624976e86-lock\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.624933 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/3f64e72d-c39e-45fa-b3df-ae8624976e86-cache\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.624964 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kl86g\" (UniqueName: \"kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-kube-api-access-kl86g\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.625015 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.625138 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.625169 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.625184 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f2500ec-e138-4ef6-a684-0400fc77e822-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:25 crc kubenswrapper[4592]: E0929 17:08:25.625294 4592 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 17:08:25 crc kubenswrapper[4592]: E0929 17:08:25.625309 4592 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 17:08:25 crc kubenswrapper[4592]: E0929 17:08:25.625362 4592 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift podName:3f64e72d-c39e-45fa-b3df-ae8624976e86 nodeName:}" failed. No retries permitted until 2025-09-29 17:08:26.12534263 +0000 UTC m=+1036.273120321 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift") pod "swift-storage-0" (UID: "3f64e72d-c39e-45fa-b3df-ae8624976e86") : configmap "swift-ring-files" not found Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.624773 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.626290 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/3f64e72d-c39e-45fa-b3df-ae8624976e86-lock\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.626519 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/3f64e72d-c39e-45fa-b3df-ae8624976e86-cache\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.642561 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kl86g\" (UniqueName: \"kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-kube-api-access-kl86g\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.666339 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.974084 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-pfxfs"] Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.975281 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.977430 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.977514 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Sep 29 17:08:25 crc kubenswrapper[4592]: I0929 17:08:25.977882 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.028876 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-kk56p"] Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.030135 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.080501 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-pfxfs"] Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134477 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-combined-ca-bundle\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134533 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-dispersionconf\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134574 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/04374dee-098a-4fd6-815a-5e861b97fc70-ring-data-devices\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134609 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-dispersionconf\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134667 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpks8\" (UniqueName: \"kubernetes.io/projected/04374dee-098a-4fd6-815a-5e861b97fc70-kube-api-access-xpks8\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134701 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134739 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-swiftconf\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134768 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-etc-swift\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134805 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-combined-ca-bundle\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134828 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/04374dee-098a-4fd6-815a-5e861b97fc70-etc-swift\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134849 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-ring-data-devices\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134885 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04374dee-098a-4fd6-815a-5e861b97fc70-scripts\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134916 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-scripts\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134941 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-swiftconf\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.134972 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmm4m\" (UniqueName: \"kubernetes.io/projected/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-kube-api-access-bmm4m\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: E0929 17:08:26.135178 4592 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 17:08:26 crc kubenswrapper[4592]: E0929 17:08:26.135194 4592 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 17:08:26 crc kubenswrapper[4592]: E0929 17:08:26.135239 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift podName:3f64e72d-c39e-45fa-b3df-ae8624976e86 nodeName:}" failed. No retries permitted until 2025-09-29 17:08:27.135220564 +0000 UTC m=+1037.282998245 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift") pod "swift-storage-0" (UID: "3f64e72d-c39e-45fa-b3df-ae8624976e86") : configmap "swift-ring-files" not found Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.138095 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-pfxfs"] Sep 29 17:08:26 crc kubenswrapper[4592]: E0929 17:08:26.141245 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-xpks8 ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/swift-ring-rebalance-pfxfs" podUID="04374dee-098a-4fd6-815a-5e861b97fc70" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.145370 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-kk56p"] Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.220023 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8bd365a0-dba3-4f81-a229-a344e01a6eca","Type":"ContainerStarted","Data":"1a20576a3d697deacdc874ba07553265fbaceda81b546486e9f59420d002f45d"} Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.220083 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"8bd365a0-dba3-4f81-a229-a344e01a6eca","Type":"ContainerStarted","Data":"054079d4bce9523f3fedf7f538784cd973716f2069dcf5f30fa22f166bff5a5b"} Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.220135 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.222802 4592 generic.go:334] "Generic (PLEG): container finished" podID="d2c2f732-4572-4064-9379-b627d76b87fd" containerID="5eb6a4f9f69f14ec0de1df22e2058284986837111f5de7322cfce8f7ba7d3978" exitCode=0 Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.222859 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.222883 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" event={"ID":"d2c2f732-4572-4064-9379-b627d76b87fd","Type":"ContainerDied","Data":"5eb6a4f9f69f14ec0de1df22e2058284986837111f5de7322cfce8f7ba7d3978"} Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.222953 4592 util.go:48] "No ready sandbox for pod can be found. 
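Note the retry spacing on the failed etc-swift mount: the first failure (17:08:25.625362) schedules the retry 500ms out, the second (17:08:26.135239) schedules it 1s out, i.e. the operation executor doubles durationBeforeRetry on each consecutive failure of the same operation. The cap used below is my recollection of the kubelet default, not something this log shows. A minimal sketch of that doubling schedule:

    package main

    import (
        "fmt"
        "time"
    )

    // Doubling backoff consistent with the 500ms -> 1s delays above.
    func main() {
        const maxDelay = 2*time.Minute + 2*time.Second // assumed cap
        d := 500 * time.Millisecond
        for i := 1; i <= 6; i++ {
            fmt.Printf("failure %d: durationBeforeRetry %v\n", i, d)
            d *= 2
            if d > maxDelay {
                d = maxDelay
            }
        }
    }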
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-vmx8w" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236020 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04374dee-098a-4fd6-815a-5e861b97fc70-scripts\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236095 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-scripts\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236129 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-swiftconf\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236201 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmm4m\" (UniqueName: \"kubernetes.io/projected/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-kube-api-access-bmm4m\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236242 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-combined-ca-bundle\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236265 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-dispersionconf\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236289 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/04374dee-098a-4fd6-815a-5e861b97fc70-ring-data-devices\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236319 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-dispersionconf\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236384 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpks8\" (UniqueName: \"kubernetes.io/projected/04374dee-098a-4fd6-815a-5e861b97fc70-kube-api-access-xpks8\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc 
kubenswrapper[4592]: I0929 17:08:26.236420 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-swiftconf\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236463 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-etc-swift\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236509 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-combined-ca-bundle\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236537 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/04374dee-098a-4fd6-815a-5e861b97fc70-etc-swift\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236576 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-ring-data-devices\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.236971 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-scripts\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.237264 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-ring-data-devices\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.239102 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04374dee-098a-4fd6-815a-5e861b97fc70-scripts\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.239308 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-etc-swift\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.239727 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: 
\"kubernetes.io/configmap/04374dee-098a-4fd6-815a-5e861b97fc70-ring-data-devices\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.243358 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/04374dee-098a-4fd6-815a-5e861b97fc70-etc-swift\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.246889 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-dispersionconf\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.249610 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-combined-ca-bundle\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.252173 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-swiftconf\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.257218 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.261418 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-dispersionconf\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.272642 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-combined-ca-bundle\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.281988 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-swiftconf\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.282937 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpks8\" (UniqueName: \"kubernetes.io/projected/04374dee-098a-4fd6-815a-5e861b97fc70-kube-api-access-xpks8\") pod \"swift-ring-rebalance-pfxfs\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.308524 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmm4m\" (UniqueName: \"kubernetes.io/projected/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-kube-api-access-bmm4m\") pod \"swift-ring-rebalance-kk56p\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.309551 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.168412139 podStartE2EDuration="4.309537234s" podCreationTimestamp="2025-09-29 17:08:22 +0000 UTC" firstStartedPulling="2025-09-29 17:08:23.098664695 +0000 UTC m=+1033.246442376" lastFinishedPulling="2025-09-29 17:08:25.23978979 +0000 UTC m=+1035.387567471" observedRunningTime="2025-09-29 17:08:26.256914824 +0000 UTC m=+1036.404692495" watchObservedRunningTime="2025-09-29 17:08:26.309537234 +0000 UTC m=+1036.457314925" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.318341 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-vmx8w"] Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.324680 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-vmx8w"] Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.339621 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-dispersionconf\") pod \"04374dee-098a-4fd6-815a-5e861b97fc70\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.339914 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-swiftconf\") pod \"04374dee-098a-4fd6-815a-5e861b97fc70\" (UID: 
\"04374dee-098a-4fd6-815a-5e861b97fc70\") " Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.340045 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpks8\" (UniqueName: \"kubernetes.io/projected/04374dee-098a-4fd6-815a-5e861b97fc70-kube-api-access-xpks8\") pod \"04374dee-098a-4fd6-815a-5e861b97fc70\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.340168 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/04374dee-098a-4fd6-815a-5e861b97fc70-etc-swift\") pod \"04374dee-098a-4fd6-815a-5e861b97fc70\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.340309 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-combined-ca-bundle\") pod \"04374dee-098a-4fd6-815a-5e861b97fc70\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.340398 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04374dee-098a-4fd6-815a-5e861b97fc70-scripts\") pod \"04374dee-098a-4fd6-815a-5e861b97fc70\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.340516 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/04374dee-098a-4fd6-815a-5e861b97fc70-ring-data-devices\") pod \"04374dee-098a-4fd6-815a-5e861b97fc70\" (UID: \"04374dee-098a-4fd6-815a-5e861b97fc70\") " Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.344823 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "04374dee-098a-4fd6-815a-5e861b97fc70" (UID: "04374dee-098a-4fd6-815a-5e861b97fc70"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.354583 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04374dee-098a-4fd6-815a-5e861b97fc70-kube-api-access-xpks8" (OuterVolumeSpecName: "kube-api-access-xpks8") pod "04374dee-098a-4fd6-815a-5e861b97fc70" (UID: "04374dee-098a-4fd6-815a-5e861b97fc70"). InnerVolumeSpecName "kube-api-access-xpks8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.355039 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04374dee-098a-4fd6-815a-5e861b97fc70-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "04374dee-098a-4fd6-815a-5e861b97fc70" (UID: "04374dee-098a-4fd6-815a-5e861b97fc70"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.358125 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04374dee-098a-4fd6-815a-5e861b97fc70-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "04374dee-098a-4fd6-815a-5e861b97fc70" (UID: "04374dee-098a-4fd6-815a-5e861b97fc70"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.361870 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "04374dee-098a-4fd6-815a-5e861b97fc70" (UID: "04374dee-098a-4fd6-815a-5e861b97fc70"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.362168 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04374dee-098a-4fd6-815a-5e861b97fc70-scripts" (OuterVolumeSpecName: "scripts") pod "04374dee-098a-4fd6-815a-5e861b97fc70" (UID: "04374dee-098a-4fd6-815a-5e861b97fc70"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.364098 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04374dee-098a-4fd6-815a-5e861b97fc70" (UID: "04374dee-098a-4fd6-815a-5e861b97fc70"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.401265 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.443913 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.443945 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04374dee-098a-4fd6-815a-5e861b97fc70-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.443956 4592 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/04374dee-098a-4fd6-815a-5e861b97fc70-ring-data-devices\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.443967 4592 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-dispersionconf\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.443979 4592 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/04374dee-098a-4fd6-815a-5e861b97fc70-swiftconf\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.443987 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpks8\" (UniqueName: \"kubernetes.io/projected/04374dee-098a-4fd6-815a-5e861b97fc70-kube-api-access-xpks8\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.443996 4592 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/04374dee-098a-4fd6-815a-5e861b97fc70-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.668944 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:26 crc kubenswrapper[4592]: I0929 17:08:26.842617 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-kk56p"] Sep 29 17:08:26 crc kubenswrapper[4592]: W0929 17:08:26.855804 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb87c021d_8ea5_4e65_9a34_68e38d02b6c3.slice/crio-9f0ffa4c42e2c0b73e723116299da908ab1d5b95eb602fe00d91bc1edc73e125 WatchSource:0}: Error finding container 9f0ffa4c42e2c0b73e723116299da908ab1d5b95eb602fe00d91bc1edc73e125: Status 404 returned error can't find the container with id 9f0ffa4c42e2c0b73e723116299da908ab1d5b95eb602fe00d91bc1edc73e125 Sep 29 17:08:27 crc kubenswrapper[4592]: I0929 17:08:27.155087 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:27 crc kubenswrapper[4592]: E0929 17:08:27.155322 4592 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 17:08:27 crc kubenswrapper[4592]: E0929 17:08:27.155365 4592 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 17:08:27 crc kubenswrapper[4592]: E0929 17:08:27.155442 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift podName:3f64e72d-c39e-45fa-b3df-ae8624976e86 nodeName:}" failed. No retries permitted until 2025-09-29 17:08:29.155417386 +0000 UTC m=+1039.303195057 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift") pod "swift-storage-0" (UID: "3f64e72d-c39e-45fa-b3df-ae8624976e86") : configmap "swift-ring-files" not found Sep 29 17:08:27 crc kubenswrapper[4592]: I0929 17:08:27.191486 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f2500ec-e138-4ef6-a684-0400fc77e822" path="/var/lib/kubelet/pods/3f2500ec-e138-4ef6-a684-0400fc77e822/volumes" Sep 29 17:08:27 crc kubenswrapper[4592]: I0929 17:08:27.215750 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Sep 29 17:08:27 crc kubenswrapper[4592]: I0929 17:08:27.232040 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" event={"ID":"d2c2f732-4572-4064-9379-b627d76b87fd","Type":"ContainerStarted","Data":"d87ea940166c0e95100aecb6d84240aa857576b1babda02b23e7d9a60f7e4996"} Sep 29 17:08:27 crc kubenswrapper[4592]: I0929 17:08:27.232715 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" Sep 29 17:08:27 crc kubenswrapper[4592]: I0929 17:08:27.241501 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-pfxfs" Sep 29 17:08:27 crc kubenswrapper[4592]: I0929 17:08:27.241490 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-kk56p" event={"ID":"b87c021d-8ea5-4e65-9a34-68e38d02b6c3","Type":"ContainerStarted","Data":"9f0ffa4c42e2c0b73e723116299da908ab1d5b95eb602fe00d91bc1edc73e125"} Sep 29 17:08:27 crc kubenswrapper[4592]: I0929 17:08:27.284660 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" podStartSLOduration=3.284644655 podStartE2EDuration="3.284644655s" podCreationTimestamp="2025-09-29 17:08:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:08:27.259107355 +0000 UTC m=+1037.406885036" watchObservedRunningTime="2025-09-29 17:08:27.284644655 +0000 UTC m=+1037.432422326" Sep 29 17:08:27 crc kubenswrapper[4592]: I0929 17:08:27.295423 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-pfxfs"] Sep 29 17:08:27 crc kubenswrapper[4592]: I0929 17:08:27.296887 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-pfxfs"] Sep 29 17:08:27 crc kubenswrapper[4592]: I0929 17:08:27.334191 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Sep 29 17:08:28 crc kubenswrapper[4592]: I0929 17:08:28.250452 4592 generic.go:334] "Generic (PLEG): container finished" podID="a9840d1a-98b1-4ff7-9140-d21bacc11b0a" containerID="e76507d181ba89b027da1aa7409c60822aa6079a9886d55ec0a23fd0d49cae9f" exitCode=0 Sep 29 17:08:28 crc kubenswrapper[4592]: I0929 17:08:28.250532 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a9840d1a-98b1-4ff7-9140-d21bacc11b0a","Type":"ContainerDied","Data":"e76507d181ba89b027da1aa7409c60822aa6079a9886d55ec0a23fd0d49cae9f"} Sep 29 17:08:28 crc kubenswrapper[4592]: I0929 17:08:28.261849 4592 generic.go:334] "Generic (PLEG): container finished" podID="62319168-243a-4613-a565-d864d75110e2" containerID="6b3c15d30109ab050101c4ecbd3b5c0d3bea838101281d73bcfb26096ff58bad" exitCode=0 Sep 29 17:08:28 crc kubenswrapper[4592]: I0929 17:08:28.262258 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"62319168-243a-4613-a565-d864d75110e2","Type":"ContainerDied","Data":"6b3c15d30109ab050101c4ecbd3b5c0d3bea838101281d73bcfb26096ff58bad"} Sep 29 17:08:29 crc kubenswrapper[4592]: I0929 17:08:29.194469 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04374dee-098a-4fd6-815a-5e861b97fc70" path="/var/lib/kubelet/pods/04374dee-098a-4fd6-815a-5e861b97fc70/volumes" Sep 29 17:08:29 crc kubenswrapper[4592]: I0929 17:08:29.204633 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:29 crc kubenswrapper[4592]: E0929 17:08:29.204839 4592 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 17:08:29 crc kubenswrapper[4592]: E0929 17:08:29.204871 4592 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not 
found Sep 29 17:08:29 crc kubenswrapper[4592]: E0929 17:08:29.204947 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift podName:3f64e72d-c39e-45fa-b3df-ae8624976e86 nodeName:}" failed. No retries permitted until 2025-09-29 17:08:33.204924071 +0000 UTC m=+1043.352701752 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift") pod "swift-storage-0" (UID: "3f64e72d-c39e-45fa-b3df-ae8624976e86") : configmap "swift-ring-files" not found Sep 29 17:08:29 crc kubenswrapper[4592]: I0929 17:08:29.585765 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Sep 29 17:08:29 crc kubenswrapper[4592]: I0929 17:08:29.645201 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Sep 29 17:08:30 crc kubenswrapper[4592]: I0929 17:08:30.882910 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:08:30 crc kubenswrapper[4592]: I0929 17:08:30.882972 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.286453 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a9840d1a-98b1-4ff7-9140-d21bacc11b0a","Type":"ContainerStarted","Data":"442e5619db30e129f3dbf7b0ce4ec443150337c3ab97c9ea2d72da996393da54"} Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.286923 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.287741 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-kk56p" event={"ID":"b87c021d-8ea5-4e65-9a34-68e38d02b6c3","Type":"ContainerStarted","Data":"ec2916f7c6c40ca3e7af0bbd245d6de02dc517915801e32283a03eb03d69445c"} Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.289550 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"62319168-243a-4613-a565-d864d75110e2","Type":"ContainerStarted","Data":"a0eaf2ddf3010ccc093f408dc1affbaf4ddec3e16636409a43b6646eb6f6d837"} Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.289870 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.308801 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.346946952 podStartE2EDuration="54.30878632s" podCreationTimestamp="2025-09-29 17:07:37 +0000 UTC" firstStartedPulling="2025-09-29 17:07:39.262506788 +0000 UTC m=+989.410284469" lastFinishedPulling="2025-09-29 17:07:57.224346156 +0000 UTC m=+1007.372123837" observedRunningTime="2025-09-29 17:08:31.305908393 +0000 UTC m=+1041.453686074" 
watchObservedRunningTime="2025-09-29 17:08:31.30878632 +0000 UTC m=+1041.456564001" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.330606 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-kk56p" podStartSLOduration=1.2097815299999999 podStartE2EDuration="5.330586817s" podCreationTimestamp="2025-09-29 17:08:26 +0000 UTC" firstStartedPulling="2025-09-29 17:08:26.858124369 +0000 UTC m=+1037.005902050" lastFinishedPulling="2025-09-29 17:08:30.978929646 +0000 UTC m=+1041.126707337" observedRunningTime="2025-09-29 17:08:31.326043568 +0000 UTC m=+1041.473821249" watchObservedRunningTime="2025-09-29 17:08:31.330586817 +0000 UTC m=+1041.478364488" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.485093 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.092102739 podStartE2EDuration="55.485074549s" podCreationTimestamp="2025-09-29 17:07:36 +0000 UTC" firstStartedPulling="2025-09-29 17:07:38.799070861 +0000 UTC m=+988.946848542" lastFinishedPulling="2025-09-29 17:07:57.192042671 +0000 UTC m=+1007.339820352" observedRunningTime="2025-09-29 17:08:31.354975434 +0000 UTC m=+1041.502753115" watchObservedRunningTime="2025-09-29 17:08:31.485074549 +0000 UTC m=+1041.632852230" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.485897 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-26t8s"] Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.486935 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-26t8s" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.494323 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-26t8s"] Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.547087 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ptk2\" (UniqueName: \"kubernetes.io/projected/b6fab31a-6011-49cd-8191-0da215d37ed5-kube-api-access-5ptk2\") pod \"keystone-db-create-26t8s\" (UID: \"b6fab31a-6011-49cd-8191-0da215d37ed5\") " pod="openstack/keystone-db-create-26t8s" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.589206 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-clmzt"] Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.590206 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-clmzt" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.600841 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-clmzt"] Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.648576 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ptk2\" (UniqueName: \"kubernetes.io/projected/b6fab31a-6011-49cd-8191-0da215d37ed5-kube-api-access-5ptk2\") pod \"keystone-db-create-26t8s\" (UID: \"b6fab31a-6011-49cd-8191-0da215d37ed5\") " pod="openstack/keystone-db-create-26t8s" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.648644 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rds45\" (UniqueName: \"kubernetes.io/projected/784e7ce0-88bf-4f62-a9f2-945d9130750a-kube-api-access-rds45\") pod \"placement-db-create-clmzt\" (UID: \"784e7ce0-88bf-4f62-a9f2-945d9130750a\") " pod="openstack/placement-db-create-clmzt" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.669599 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ptk2\" (UniqueName: \"kubernetes.io/projected/b6fab31a-6011-49cd-8191-0da215d37ed5-kube-api-access-5ptk2\") pod \"keystone-db-create-26t8s\" (UID: \"b6fab31a-6011-49cd-8191-0da215d37ed5\") " pod="openstack/keystone-db-create-26t8s" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.750449 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rds45\" (UniqueName: \"kubernetes.io/projected/784e7ce0-88bf-4f62-a9f2-945d9130750a-kube-api-access-rds45\") pod \"placement-db-create-clmzt\" (UID: \"784e7ce0-88bf-4f62-a9f2-945d9130750a\") " pod="openstack/placement-db-create-clmzt" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.768515 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rds45\" (UniqueName: \"kubernetes.io/projected/784e7ce0-88bf-4f62-a9f2-945d9130750a-kube-api-access-rds45\") pod \"placement-db-create-clmzt\" (UID: \"784e7ce0-88bf-4f62-a9f2-945d9130750a\") " pod="openstack/placement-db-create-clmzt" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.800296 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-26t8s" Sep 29 17:08:31 crc kubenswrapper[4592]: I0929 17:08:31.907169 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-clmzt" Sep 29 17:08:32 crc kubenswrapper[4592]: I0929 17:08:32.078328 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-26t8s"] Sep 29 17:08:32 crc kubenswrapper[4592]: I0929 17:08:32.298120 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-26t8s" event={"ID":"b6fab31a-6011-49cd-8191-0da215d37ed5","Type":"ContainerStarted","Data":"b0e40ac4955c0613ebaa1232e3168cfaf5ebadacf121ef8b2548db4e959425a0"} Sep 29 17:08:32 crc kubenswrapper[4592]: I0929 17:08:32.446267 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-clmzt"] Sep 29 17:08:33 crc kubenswrapper[4592]: I0929 17:08:33.277811 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:33 crc kubenswrapper[4592]: E0929 17:08:33.278065 4592 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Sep 29 17:08:33 crc kubenswrapper[4592]: E0929 17:08:33.278491 4592 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Sep 29 17:08:33 crc kubenswrapper[4592]: E0929 17:08:33.278561 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift podName:3f64e72d-c39e-45fa-b3df-ae8624976e86 nodeName:}" failed. No retries permitted until 2025-09-29 17:08:41.278543455 +0000 UTC m=+1051.426321136 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift") pod "swift-storage-0" (UID: "3f64e72d-c39e-45fa-b3df-ae8624976e86") : configmap "swift-ring-files" not found Sep 29 17:08:33 crc kubenswrapper[4592]: I0929 17:08:33.311256 4592 generic.go:334] "Generic (PLEG): container finished" podID="b6fab31a-6011-49cd-8191-0da215d37ed5" containerID="016204b5c6bb67ac28917edf7322bdfa9a568f677d8d4f754e57d39edea938f8" exitCode=0 Sep 29 17:08:33 crc kubenswrapper[4592]: I0929 17:08:33.311298 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-26t8s" event={"ID":"b6fab31a-6011-49cd-8191-0da215d37ed5","Type":"ContainerDied","Data":"016204b5c6bb67ac28917edf7322bdfa9a568f677d8d4f754e57d39edea938f8"} Sep 29 17:08:33 crc kubenswrapper[4592]: I0929 17:08:33.312832 4592 generic.go:334] "Generic (PLEG): container finished" podID="784e7ce0-88bf-4f62-a9f2-945d9130750a" containerID="72ee675685325aaa10f59899167fceda6acc68b742ac3623c55287c15c8a1456" exitCode=0 Sep 29 17:08:33 crc kubenswrapper[4592]: I0929 17:08:33.312865 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-clmzt" event={"ID":"784e7ce0-88bf-4f62-a9f2-945d9130750a","Type":"ContainerDied","Data":"72ee675685325aaa10f59899167fceda6acc68b742ac3623c55287c15c8a1456"} Sep 29 17:08:33 crc kubenswrapper[4592]: I0929 17:08:33.312885 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-clmzt" event={"ID":"784e7ce0-88bf-4f62-a9f2-945d9130750a","Type":"ContainerStarted","Data":"aac8c17cf3ce8a45ba4fd2f28c9d1f6cf03a4de17f55cf1ae92147ed493b67c7"} Sep 29 17:08:34 crc kubenswrapper[4592]: I0929 17:08:34.567583 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" Sep 29 17:08:34 crc kubenswrapper[4592]: I0929 17:08:34.659732 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-5w9kn"] Sep 29 17:08:34 crc kubenswrapper[4592]: I0929 17:08:34.660190 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-5w9kn" podUID="50aa0c99-1d2e-4d25-8538-f7561e08fe27" containerName="dnsmasq-dns" containerID="cri-o://63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b" gracePeriod=10 Sep 29 17:08:34 crc kubenswrapper[4592]: I0929 17:08:34.863919 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-26t8s" Sep 29 17:08:34 crc kubenswrapper[4592]: I0929 17:08:34.867366 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-clmzt" Sep 29 17:08:34 crc kubenswrapper[4592]: E0929 17:08:34.868083 4592 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50aa0c99_1d2e_4d25_8538_f7561e08fe27.slice/crio-63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50aa0c99_1d2e_4d25_8538_f7561e08fe27.slice/crio-conmon-63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b.scope\": RecentStats: unable to find data in memory cache]" Sep 29 17:08:34 crc kubenswrapper[4592]: I0929 17:08:34.906503 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ptk2\" (UniqueName: \"kubernetes.io/projected/b6fab31a-6011-49cd-8191-0da215d37ed5-kube-api-access-5ptk2\") pod \"b6fab31a-6011-49cd-8191-0da215d37ed5\" (UID: \"b6fab31a-6011-49cd-8191-0da215d37ed5\") " Sep 29 17:08:34 crc kubenswrapper[4592]: I0929 17:08:34.906926 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rds45\" (UniqueName: \"kubernetes.io/projected/784e7ce0-88bf-4f62-a9f2-945d9130750a-kube-api-access-rds45\") pod \"784e7ce0-88bf-4f62-a9f2-945d9130750a\" (UID: \"784e7ce0-88bf-4f62-a9f2-945d9130750a\") " Sep 29 17:08:34 crc kubenswrapper[4592]: I0929 17:08:34.928738 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6fab31a-6011-49cd-8191-0da215d37ed5-kube-api-access-5ptk2" (OuterVolumeSpecName: "kube-api-access-5ptk2") pod "b6fab31a-6011-49cd-8191-0da215d37ed5" (UID: "b6fab31a-6011-49cd-8191-0da215d37ed5"). InnerVolumeSpecName "kube-api-access-5ptk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:34 crc kubenswrapper[4592]: I0929 17:08:34.929749 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/784e7ce0-88bf-4f62-a9f2-945d9130750a-kube-api-access-rds45" (OuterVolumeSpecName: "kube-api-access-rds45") pod "784e7ce0-88bf-4f62-a9f2-945d9130750a" (UID: "784e7ce0-88bf-4f62-a9f2-945d9130750a"). InnerVolumeSpecName "kube-api-access-rds45". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.008722 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ptk2\" (UniqueName: \"kubernetes.io/projected/b6fab31a-6011-49cd-8191-0da215d37ed5-kube-api-access-5ptk2\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.008759 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rds45\" (UniqueName: \"kubernetes.io/projected/784e7ce0-88bf-4f62-a9f2-945d9130750a-kube-api-access-rds45\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.269757 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.314928 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-ovsdbserver-sb\") pod \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.314982 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-config\") pod \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.315024 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js9kz\" (UniqueName: \"kubernetes.io/projected/50aa0c99-1d2e-4d25-8538-f7561e08fe27-kube-api-access-js9kz\") pod \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.315054 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-ovsdbserver-nb\") pod \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.315123 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-dns-svc\") pod \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\" (UID: \"50aa0c99-1d2e-4d25-8538-f7561e08fe27\") " Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.324738 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50aa0c99-1d2e-4d25-8538-f7561e08fe27-kube-api-access-js9kz" (OuterVolumeSpecName: "kube-api-access-js9kz") pod "50aa0c99-1d2e-4d25-8538-f7561e08fe27" (UID: "50aa0c99-1d2e-4d25-8538-f7561e08fe27"). InnerVolumeSpecName "kube-api-access-js9kz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.340448 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-clmzt" event={"ID":"784e7ce0-88bf-4f62-a9f2-945d9130750a","Type":"ContainerDied","Data":"aac8c17cf3ce8a45ba4fd2f28c9d1f6cf03a4de17f55cf1ae92147ed493b67c7"} Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.340491 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aac8c17cf3ce8a45ba4fd2f28c9d1f6cf03a4de17f55cf1ae92147ed493b67c7" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.340545 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-clmzt" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.342743 4592 generic.go:334] "Generic (PLEG): container finished" podID="50aa0c99-1d2e-4d25-8538-f7561e08fe27" containerID="63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b" exitCode=0 Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.342794 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-5w9kn" event={"ID":"50aa0c99-1d2e-4d25-8538-f7561e08fe27","Type":"ContainerDied","Data":"63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b"} Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.342813 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-5w9kn" event={"ID":"50aa0c99-1d2e-4d25-8538-f7561e08fe27","Type":"ContainerDied","Data":"a8db0f10bff98a0b91a7959f4b35925bba1cc2e47b7b67882760e1f3fef57f53"} Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.342839 4592 scope.go:117] "RemoveContainer" containerID="63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.342919 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-5w9kn" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.361916 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-26t8s" event={"ID":"b6fab31a-6011-49cd-8191-0da215d37ed5","Type":"ContainerDied","Data":"b0e40ac4955c0613ebaa1232e3168cfaf5ebadacf121ef8b2548db4e959425a0"} Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.361952 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0e40ac4955c0613ebaa1232e3168cfaf5ebadacf121ef8b2548db4e959425a0" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.361997 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-26t8s" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.404677 4592 scope.go:117] "RemoveContainer" containerID="6447e1c9c3751f2419e73e4df1acbef19fb7ec95ab4365d1fae17df0779d1503" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.417348 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js9kz\" (UniqueName: \"kubernetes.io/projected/50aa0c99-1d2e-4d25-8538-f7561e08fe27-kube-api-access-js9kz\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.438352 4592 scope.go:117] "RemoveContainer" containerID="63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b" Sep 29 17:08:35 crc kubenswrapper[4592]: E0929 17:08:35.438881 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b\": container with ID starting with 63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b not found: ID does not exist" containerID="63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.438922 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b"} err="failed to get container status \"63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b\": rpc error: code = NotFound desc = could not find container \"63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b\": container with ID starting with 63b91968699bea3ea3736cdd303fa57cd6dc35e199b1d826168b7fdcdb8f4f6b not found: ID does not exist" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.438947 4592 scope.go:117] "RemoveContainer" containerID="6447e1c9c3751f2419e73e4df1acbef19fb7ec95ab4365d1fae17df0779d1503" Sep 29 17:08:35 crc kubenswrapper[4592]: E0929 17:08:35.439279 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6447e1c9c3751f2419e73e4df1acbef19fb7ec95ab4365d1fae17df0779d1503\": container with ID starting with 6447e1c9c3751f2419e73e4df1acbef19fb7ec95ab4365d1fae17df0779d1503 not found: ID does not exist" containerID="6447e1c9c3751f2419e73e4df1acbef19fb7ec95ab4365d1fae17df0779d1503" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.439304 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6447e1c9c3751f2419e73e4df1acbef19fb7ec95ab4365d1fae17df0779d1503"} err="failed to get container status \"6447e1c9c3751f2419e73e4df1acbef19fb7ec95ab4365d1fae17df0779d1503\": rpc error: code = NotFound desc = could not find container \"6447e1c9c3751f2419e73e4df1acbef19fb7ec95ab4365d1fae17df0779d1503\": container with ID starting with 6447e1c9c3751f2419e73e4df1acbef19fb7ec95ab4365d1fae17df0779d1503 not found: ID does not exist" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.439419 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "50aa0c99-1d2e-4d25-8538-f7561e08fe27" (UID: "50aa0c99-1d2e-4d25-8538-f7561e08fe27"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.442554 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-config" (OuterVolumeSpecName: "config") pod "50aa0c99-1d2e-4d25-8538-f7561e08fe27" (UID: "50aa0c99-1d2e-4d25-8538-f7561e08fe27"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.455637 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "50aa0c99-1d2e-4d25-8538-f7561e08fe27" (UID: "50aa0c99-1d2e-4d25-8538-f7561e08fe27"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.462755 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "50aa0c99-1d2e-4d25-8538-f7561e08fe27" (UID: "50aa0c99-1d2e-4d25-8538-f7561e08fe27"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.519166 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.519201 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.519216 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.519227 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/50aa0c99-1d2e-4d25-8538-f7561e08fe27-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.668380 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-5w9kn"] Sep 29 17:08:35 crc kubenswrapper[4592]: I0929 17:08:35.673789 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-5w9kn"] Sep 29 17:08:36 crc kubenswrapper[4592]: I0929 17:08:36.855015 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-mswng"] Sep 29 17:08:36 crc kubenswrapper[4592]: E0929 17:08:36.855668 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50aa0c99-1d2e-4d25-8538-f7561e08fe27" containerName="init" Sep 29 17:08:36 crc kubenswrapper[4592]: I0929 17:08:36.855684 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="50aa0c99-1d2e-4d25-8538-f7561e08fe27" containerName="init" Sep 29 17:08:36 crc kubenswrapper[4592]: E0929 17:08:36.855709 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6fab31a-6011-49cd-8191-0da215d37ed5" containerName="mariadb-database-create" Sep 29 17:08:36 crc kubenswrapper[4592]: I0929 17:08:36.855715 4592 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="b6fab31a-6011-49cd-8191-0da215d37ed5" containerName="mariadb-database-create" Sep 29 17:08:36 crc kubenswrapper[4592]: E0929 17:08:36.855723 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50aa0c99-1d2e-4d25-8538-f7561e08fe27" containerName="dnsmasq-dns" Sep 29 17:08:36 crc kubenswrapper[4592]: I0929 17:08:36.855729 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="50aa0c99-1d2e-4d25-8538-f7561e08fe27" containerName="dnsmasq-dns" Sep 29 17:08:36 crc kubenswrapper[4592]: E0929 17:08:36.855739 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="784e7ce0-88bf-4f62-a9f2-945d9130750a" containerName="mariadb-database-create" Sep 29 17:08:36 crc kubenswrapper[4592]: I0929 17:08:36.855744 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="784e7ce0-88bf-4f62-a9f2-945d9130750a" containerName="mariadb-database-create" Sep 29 17:08:36 crc kubenswrapper[4592]: I0929 17:08:36.855902 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="784e7ce0-88bf-4f62-a9f2-945d9130750a" containerName="mariadb-database-create" Sep 29 17:08:36 crc kubenswrapper[4592]: I0929 17:08:36.855926 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6fab31a-6011-49cd-8191-0da215d37ed5" containerName="mariadb-database-create" Sep 29 17:08:36 crc kubenswrapper[4592]: I0929 17:08:36.855934 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="50aa0c99-1d2e-4d25-8538-f7561e08fe27" containerName="dnsmasq-dns" Sep 29 17:08:36 crc kubenswrapper[4592]: I0929 17:08:36.856444 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-mswng" Sep 29 17:08:36 crc kubenswrapper[4592]: I0929 17:08:36.871898 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-mswng"] Sep 29 17:08:37 crc kubenswrapper[4592]: I0929 17:08:37.041225 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99qrd\" (UniqueName: \"kubernetes.io/projected/2f9d25df-ce35-40af-999f-f80d6178e7c2-kube-api-access-99qrd\") pod \"glance-db-create-mswng\" (UID: \"2f9d25df-ce35-40af-999f-f80d6178e7c2\") " pod="openstack/glance-db-create-mswng" Sep 29 17:08:37 crc kubenswrapper[4592]: I0929 17:08:37.142604 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99qrd\" (UniqueName: \"kubernetes.io/projected/2f9d25df-ce35-40af-999f-f80d6178e7c2-kube-api-access-99qrd\") pod \"glance-db-create-mswng\" (UID: \"2f9d25df-ce35-40af-999f-f80d6178e7c2\") " pod="openstack/glance-db-create-mswng" Sep 29 17:08:37 crc kubenswrapper[4592]: I0929 17:08:37.172882 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99qrd\" (UniqueName: \"kubernetes.io/projected/2f9d25df-ce35-40af-999f-f80d6178e7c2-kube-api-access-99qrd\") pod \"glance-db-create-mswng\" (UID: \"2f9d25df-ce35-40af-999f-f80d6178e7c2\") " pod="openstack/glance-db-create-mswng" Sep 29 17:08:37 crc kubenswrapper[4592]: I0929 17:08:37.190229 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-mswng" Sep 29 17:08:37 crc kubenswrapper[4592]: I0929 17:08:37.194224 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50aa0c99-1d2e-4d25-8538-f7561e08fe27" path="/var/lib/kubelet/pods/50aa0c99-1d2e-4d25-8538-f7561e08fe27/volumes" Sep 29 17:08:37 crc kubenswrapper[4592]: I0929 17:08:37.677832 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-mswng"] Sep 29 17:08:37 crc kubenswrapper[4592]: I0929 17:08:37.748122 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Sep 29 17:08:37 crc kubenswrapper[4592]: I0929 17:08:37.922136 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-jfzwf" podUID="d813cc31-c8ba-48c0-b523-3d2b3fbc3341" containerName="ovn-controller" probeResult="failure" output=< Sep 29 17:08:37 crc kubenswrapper[4592]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 29 17:08:37 crc kubenswrapper[4592]: > Sep 29 17:08:38 crc kubenswrapper[4592]: I0929 17:08:38.386652 4592 generic.go:334] "Generic (PLEG): container finished" podID="2f9d25df-ce35-40af-999f-f80d6178e7c2" containerID="5eaa2af99cedc681c75c46bc9c4c7ed7876f273bd73754754201446df74d811c" exitCode=0 Sep 29 17:08:38 crc kubenswrapper[4592]: I0929 17:08:38.386693 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-mswng" event={"ID":"2f9d25df-ce35-40af-999f-f80d6178e7c2","Type":"ContainerDied","Data":"5eaa2af99cedc681c75c46bc9c4c7ed7876f273bd73754754201446df74d811c"} Sep 29 17:08:38 crc kubenswrapper[4592]: I0929 17:08:38.386718 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-mswng" event={"ID":"2f9d25df-ce35-40af-999f-f80d6178e7c2","Type":"ContainerStarted","Data":"1cd805f11b04b5029605a31734e07201551e3f779eec28237cf0ee5f040f179a"} Sep 29 17:08:39 crc kubenswrapper[4592]: I0929 17:08:39.792894 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-mswng" Sep 29 17:08:39 crc kubenswrapper[4592]: I0929 17:08:39.887173 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99qrd\" (UniqueName: \"kubernetes.io/projected/2f9d25df-ce35-40af-999f-f80d6178e7c2-kube-api-access-99qrd\") pod \"2f9d25df-ce35-40af-999f-f80d6178e7c2\" (UID: \"2f9d25df-ce35-40af-999f-f80d6178e7c2\") " Sep 29 17:08:39 crc kubenswrapper[4592]: I0929 17:08:39.897221 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f9d25df-ce35-40af-999f-f80d6178e7c2-kube-api-access-99qrd" (OuterVolumeSpecName: "kube-api-access-99qrd") pod "2f9d25df-ce35-40af-999f-f80d6178e7c2" (UID: "2f9d25df-ce35-40af-999f-f80d6178e7c2"). InnerVolumeSpecName "kube-api-access-99qrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:39 crc kubenswrapper[4592]: I0929 17:08:39.988752 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99qrd\" (UniqueName: \"kubernetes.io/projected/2f9d25df-ce35-40af-999f-f80d6178e7c2-kube-api-access-99qrd\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:40 crc kubenswrapper[4592]: I0929 17:08:40.403055 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-mswng" Sep 29 17:08:40 crc kubenswrapper[4592]: I0929 17:08:40.403065 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-mswng" event={"ID":"2f9d25df-ce35-40af-999f-f80d6178e7c2","Type":"ContainerDied","Data":"1cd805f11b04b5029605a31734e07201551e3f779eec28237cf0ee5f040f179a"} Sep 29 17:08:40 crc kubenswrapper[4592]: I0929 17:08:40.403622 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1cd805f11b04b5029605a31734e07201551e3f779eec28237cf0ee5f040f179a" Sep 29 17:08:40 crc kubenswrapper[4592]: I0929 17:08:40.404413 4592 generic.go:334] "Generic (PLEG): container finished" podID="b87c021d-8ea5-4e65-9a34-68e38d02b6c3" containerID="ec2916f7c6c40ca3e7af0bbd245d6de02dc517915801e32283a03eb03d69445c" exitCode=0 Sep 29 17:08:40 crc kubenswrapper[4592]: I0929 17:08:40.404446 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-kk56p" event={"ID":"b87c021d-8ea5-4e65-9a34-68e38d02b6c3","Type":"ContainerDied","Data":"ec2916f7c6c40ca3e7af0bbd245d6de02dc517915801e32283a03eb03d69445c"} Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.307918 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.318055 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f64e72d-c39e-45fa-b3df-ae8624976e86-etc-swift\") pod \"swift-storage-0\" (UID: \"3f64e72d-c39e-45fa-b3df-ae8624976e86\") " pod="openstack/swift-storage-0" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.596241 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.601982 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-9a08-account-create-b4zwb"] Sep 29 17:08:41 crc kubenswrapper[4592]: E0929 17:08:41.603653 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f9d25df-ce35-40af-999f-f80d6178e7c2" containerName="mariadb-database-create" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.603669 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f9d25df-ce35-40af-999f-f80d6178e7c2" containerName="mariadb-database-create" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.606375 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f9d25df-ce35-40af-999f-f80d6178e7c2" containerName="mariadb-database-create" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.607102 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-9a08-account-create-b4zwb" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.609649 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.656306 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9a08-account-create-b4zwb"] Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.720326 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njkcj\" (UniqueName: \"kubernetes.io/projected/daf5a84e-448e-458b-add1-59b8bf1bfc30-kube-api-access-njkcj\") pod \"keystone-9a08-account-create-b4zwb\" (UID: \"daf5a84e-448e-458b-add1-59b8bf1bfc30\") " pod="openstack/keystone-9a08-account-create-b4zwb" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.822379 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njkcj\" (UniqueName: \"kubernetes.io/projected/daf5a84e-448e-458b-add1-59b8bf1bfc30-kube-api-access-njkcj\") pod \"keystone-9a08-account-create-b4zwb\" (UID: \"daf5a84e-448e-458b-add1-59b8bf1bfc30\") " pod="openstack/keystone-9a08-account-create-b4zwb" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.856699 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njkcj\" (UniqueName: \"kubernetes.io/projected/daf5a84e-448e-458b-add1-59b8bf1bfc30-kube-api-access-njkcj\") pod \"keystone-9a08-account-create-b4zwb\" (UID: \"daf5a84e-448e-458b-add1-59b8bf1bfc30\") " pod="openstack/keystone-9a08-account-create-b4zwb" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.873392 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.937609 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9a08-account-create-b4zwb" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.998340 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-e597-account-create-d9sx7"] Sep 29 17:08:41 crc kubenswrapper[4592]: E0929 17:08:41.998655 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b87c021d-8ea5-4e65-9a34-68e38d02b6c3" containerName="swift-ring-rebalance" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.998670 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b87c021d-8ea5-4e65-9a34-68e38d02b6c3" containerName="swift-ring-rebalance" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.998889 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b87c021d-8ea5-4e65-9a34-68e38d02b6c3" containerName="swift-ring-rebalance" Sep 29 17:08:41 crc kubenswrapper[4592]: I0929 17:08:41.999422 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-e597-account-create-d9sx7" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.001900 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.022643 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e597-account-create-d9sx7"] Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.024549 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-combined-ca-bundle\") pod \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.024622 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-scripts\") pod \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.024652 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-swiftconf\") pod \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.024703 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-etc-swift\") pod \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.024872 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-dispersionconf\") pod \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.024910 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-ring-data-devices\") pod \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.024946 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmm4m\" (UniqueName: \"kubernetes.io/projected/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-kube-api-access-bmm4m\") pod \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\" (UID: \"b87c021d-8ea5-4e65-9a34-68e38d02b6c3\") " Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.026587 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "b87c021d-8ea5-4e65-9a34-68e38d02b6c3" (UID: "b87c021d-8ea5-4e65-9a34-68e38d02b6c3"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.041999 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "b87c021d-8ea5-4e65-9a34-68e38d02b6c3" (UID: "b87c021d-8ea5-4e65-9a34-68e38d02b6c3"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.047848 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-kube-api-access-bmm4m" (OuterVolumeSpecName: "kube-api-access-bmm4m") pod "b87c021d-8ea5-4e65-9a34-68e38d02b6c3" (UID: "b87c021d-8ea5-4e65-9a34-68e38d02b6c3"). InnerVolumeSpecName "kube-api-access-bmm4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.050036 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "b87c021d-8ea5-4e65-9a34-68e38d02b6c3" (UID: "b87c021d-8ea5-4e65-9a34-68e38d02b6c3"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.061867 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-scripts" (OuterVolumeSpecName: "scripts") pod "b87c021d-8ea5-4e65-9a34-68e38d02b6c3" (UID: "b87c021d-8ea5-4e65-9a34-68e38d02b6c3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.079391 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b87c021d-8ea5-4e65-9a34-68e38d02b6c3" (UID: "b87c021d-8ea5-4e65-9a34-68e38d02b6c3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.081308 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "b87c021d-8ea5-4e65-9a34-68e38d02b6c3" (UID: "b87c021d-8ea5-4e65-9a34-68e38d02b6c3"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.126644 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smvnq\" (UniqueName: \"kubernetes.io/projected/4906ee82-cf4d-4ee7-9bba-d3953ab8ee23-kube-api-access-smvnq\") pod \"placement-e597-account-create-d9sx7\" (UID: \"4906ee82-cf4d-4ee7-9bba-d3953ab8ee23\") " pod="openstack/placement-e597-account-create-d9sx7" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.126716 4592 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-dispersionconf\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.126775 4592 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-ring-data-devices\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.126814 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmm4m\" (UniqueName: \"kubernetes.io/projected/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-kube-api-access-bmm4m\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.126909 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.126969 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.126983 4592 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-swiftconf\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.126993 4592 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b87c021d-8ea5-4e65-9a34-68e38d02b6c3-etc-swift\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.229309 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smvnq\" (UniqueName: \"kubernetes.io/projected/4906ee82-cf4d-4ee7-9bba-d3953ab8ee23-kube-api-access-smvnq\") pod \"placement-e597-account-create-d9sx7\" (UID: \"4906ee82-cf4d-4ee7-9bba-d3953ab8ee23\") " pod="openstack/placement-e597-account-create-d9sx7" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.268065 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smvnq\" (UniqueName: \"kubernetes.io/projected/4906ee82-cf4d-4ee7-9bba-d3953ab8ee23-kube-api-access-smvnq\") pod \"placement-e597-account-create-d9sx7\" (UID: \"4906ee82-cf4d-4ee7-9bba-d3953ab8ee23\") " pod="openstack/placement-e597-account-create-d9sx7" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.343648 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-e597-account-create-d9sx7" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.386939 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Sep 29 17:08:42 crc kubenswrapper[4592]: W0929 17:08:42.398659 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f64e72d_c39e_45fa_b3df_ae8624976e86.slice/crio-2c0c89cfdba5945e77d4357d2cf8f088e087e84411bdfd261f1b35fa3a6658b0 WatchSource:0}: Error finding container 2c0c89cfdba5945e77d4357d2cf8f088e087e84411bdfd261f1b35fa3a6658b0: Status 404 returned error can't find the container with id 2c0c89cfdba5945e77d4357d2cf8f088e087e84411bdfd261f1b35fa3a6658b0 Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.429339 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-kk56p" event={"ID":"b87c021d-8ea5-4e65-9a34-68e38d02b6c3","Type":"ContainerDied","Data":"9f0ffa4c42e2c0b73e723116299da908ab1d5b95eb602fe00d91bc1edc73e125"} Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.429377 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f0ffa4c42e2c0b73e723116299da908ab1d5b95eb602fe00d91bc1edc73e125" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.429444 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-kk56p" Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.435217 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"2c0c89cfdba5945e77d4357d2cf8f088e087e84411bdfd261f1b35fa3a6658b0"} Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.464891 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-9a08-account-create-b4zwb"] Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.895595 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e597-account-create-d9sx7"] Sep 29 17:08:42 crc kubenswrapper[4592]: I0929 17:08:42.923410 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-jfzwf" podUID="d813cc31-c8ba-48c0-b523-3d2b3fbc3341" containerName="ovn-controller" probeResult="failure" output=< Sep 29 17:08:42 crc kubenswrapper[4592]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 29 17:08:42 crc kubenswrapper[4592]: > Sep 29 17:08:43 crc kubenswrapper[4592]: I0929 17:08:43.251603 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:08:43 crc kubenswrapper[4592]: I0929 17:08:43.450782 4592 generic.go:334] "Generic (PLEG): container finished" podID="4906ee82-cf4d-4ee7-9bba-d3953ab8ee23" containerID="35a7d5f2265b9c8040df77c2f722e28e2e2c8ee2740e9cfdb9c14c4169dd5bb7" exitCode=0 Sep 29 17:08:43 crc kubenswrapper[4592]: I0929 17:08:43.450844 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e597-account-create-d9sx7" event={"ID":"4906ee82-cf4d-4ee7-9bba-d3953ab8ee23","Type":"ContainerDied","Data":"35a7d5f2265b9c8040df77c2f722e28e2e2c8ee2740e9cfdb9c14c4169dd5bb7"} Sep 29 17:08:43 crc kubenswrapper[4592]: I0929 17:08:43.450871 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e597-account-create-d9sx7" 
event={"ID":"4906ee82-cf4d-4ee7-9bba-d3953ab8ee23","Type":"ContainerStarted","Data":"524ee968988ddc177b684d3bf8296f66727cdc9f46eda2fc9754da0455937c2e"} Sep 29 17:08:43 crc kubenswrapper[4592]: I0929 17:08:43.453385 4592 generic.go:334] "Generic (PLEG): container finished" podID="daf5a84e-448e-458b-add1-59b8bf1bfc30" containerID="00f2a60b3610dbf30b9cd143ac1a4c4b1873aab80e916871ee7fa11bbc0b4897" exitCode=0 Sep 29 17:08:43 crc kubenswrapper[4592]: I0929 17:08:43.453415 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9a08-account-create-b4zwb" event={"ID":"daf5a84e-448e-458b-add1-59b8bf1bfc30","Type":"ContainerDied","Data":"00f2a60b3610dbf30b9cd143ac1a4c4b1873aab80e916871ee7fa11bbc0b4897"} Sep 29 17:08:43 crc kubenswrapper[4592]: I0929 17:08:43.453437 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9a08-account-create-b4zwb" event={"ID":"daf5a84e-448e-458b-add1-59b8bf1bfc30","Type":"ContainerStarted","Data":"c87b462493b9fff39adfedb5df73ea43c787150759535434ecc0c63b73eeb680"} Sep 29 17:08:44 crc kubenswrapper[4592]: I0929 17:08:44.890445 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9a08-account-create-b4zwb" Sep 29 17:08:44 crc kubenswrapper[4592]: I0929 17:08:44.963523 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e597-account-create-d9sx7" Sep 29 17:08:44 crc kubenswrapper[4592]: I0929 17:08:44.995202 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njkcj\" (UniqueName: \"kubernetes.io/projected/daf5a84e-448e-458b-add1-59b8bf1bfc30-kube-api-access-njkcj\") pod \"daf5a84e-448e-458b-add1-59b8bf1bfc30\" (UID: \"daf5a84e-448e-458b-add1-59b8bf1bfc30\") " Sep 29 17:08:45 crc kubenswrapper[4592]: I0929 17:08:45.001298 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daf5a84e-448e-458b-add1-59b8bf1bfc30-kube-api-access-njkcj" (OuterVolumeSpecName: "kube-api-access-njkcj") pod "daf5a84e-448e-458b-add1-59b8bf1bfc30" (UID: "daf5a84e-448e-458b-add1-59b8bf1bfc30"). InnerVolumeSpecName "kube-api-access-njkcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:45 crc kubenswrapper[4592]: I0929 17:08:45.097222 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smvnq\" (UniqueName: \"kubernetes.io/projected/4906ee82-cf4d-4ee7-9bba-d3953ab8ee23-kube-api-access-smvnq\") pod \"4906ee82-cf4d-4ee7-9bba-d3953ab8ee23\" (UID: \"4906ee82-cf4d-4ee7-9bba-d3953ab8ee23\") " Sep 29 17:08:45 crc kubenswrapper[4592]: I0929 17:08:45.097721 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njkcj\" (UniqueName: \"kubernetes.io/projected/daf5a84e-448e-458b-add1-59b8bf1bfc30-kube-api-access-njkcj\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:45 crc kubenswrapper[4592]: I0929 17:08:45.100514 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4906ee82-cf4d-4ee7-9bba-d3953ab8ee23-kube-api-access-smvnq" (OuterVolumeSpecName: "kube-api-access-smvnq") pod "4906ee82-cf4d-4ee7-9bba-d3953ab8ee23" (UID: "4906ee82-cf4d-4ee7-9bba-d3953ab8ee23"). InnerVolumeSpecName "kube-api-access-smvnq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:45 crc kubenswrapper[4592]: I0929 17:08:45.199877 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smvnq\" (UniqueName: \"kubernetes.io/projected/4906ee82-cf4d-4ee7-9bba-d3953ab8ee23-kube-api-access-smvnq\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:45 crc kubenswrapper[4592]: I0929 17:08:45.494253 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e597-account-create-d9sx7" Sep 29 17:08:45 crc kubenswrapper[4592]: I0929 17:08:45.494231 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e597-account-create-d9sx7" event={"ID":"4906ee82-cf4d-4ee7-9bba-d3953ab8ee23","Type":"ContainerDied","Data":"524ee968988ddc177b684d3bf8296f66727cdc9f46eda2fc9754da0455937c2e"} Sep 29 17:08:45 crc kubenswrapper[4592]: I0929 17:08:45.494372 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="524ee968988ddc177b684d3bf8296f66727cdc9f46eda2fc9754da0455937c2e" Sep 29 17:08:45 crc kubenswrapper[4592]: I0929 17:08:45.495734 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-9a08-account-create-b4zwb" event={"ID":"daf5a84e-448e-458b-add1-59b8bf1bfc30","Type":"ContainerDied","Data":"c87b462493b9fff39adfedb5df73ea43c787150759535434ecc0c63b73eeb680"} Sep 29 17:08:45 crc kubenswrapper[4592]: I0929 17:08:45.495768 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c87b462493b9fff39adfedb5df73ea43c787150759535434ecc0c63b73eeb680" Sep 29 17:08:45 crc kubenswrapper[4592]: I0929 17:08:45.495804 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-9a08-account-create-b4zwb" Sep 29 17:08:46 crc kubenswrapper[4592]: I0929 17:08:46.930894 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-415a-account-create-8r6tt"] Sep 29 17:08:46 crc kubenswrapper[4592]: E0929 17:08:46.931581 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daf5a84e-448e-458b-add1-59b8bf1bfc30" containerName="mariadb-account-create" Sep 29 17:08:46 crc kubenswrapper[4592]: I0929 17:08:46.931599 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="daf5a84e-448e-458b-add1-59b8bf1bfc30" containerName="mariadb-account-create" Sep 29 17:08:46 crc kubenswrapper[4592]: E0929 17:08:46.931631 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4906ee82-cf4d-4ee7-9bba-d3953ab8ee23" containerName="mariadb-account-create" Sep 29 17:08:46 crc kubenswrapper[4592]: I0929 17:08:46.931639 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4906ee82-cf4d-4ee7-9bba-d3953ab8ee23" containerName="mariadb-account-create" Sep 29 17:08:46 crc kubenswrapper[4592]: I0929 17:08:46.931818 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="4906ee82-cf4d-4ee7-9bba-d3953ab8ee23" containerName="mariadb-account-create" Sep 29 17:08:46 crc kubenswrapper[4592]: I0929 17:08:46.931854 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="daf5a84e-448e-458b-add1-59b8bf1bfc30" containerName="mariadb-account-create" Sep 29 17:08:46 crc kubenswrapper[4592]: I0929 17:08:46.932499 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-415a-account-create-8r6tt" Sep 29 17:08:46 crc kubenswrapper[4592]: I0929 17:08:46.936442 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Sep 29 17:08:46 crc kubenswrapper[4592]: I0929 17:08:46.944421 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-415a-account-create-8r6tt"] Sep 29 17:08:47 crc kubenswrapper[4592]: I0929 17:08:47.046470 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpcd5\" (UniqueName: \"kubernetes.io/projected/64a1fa2f-51dd-4755-8823-93be4cbbf71c-kube-api-access-hpcd5\") pod \"glance-415a-account-create-8r6tt\" (UID: \"64a1fa2f-51dd-4755-8823-93be4cbbf71c\") " pod="openstack/glance-415a-account-create-8r6tt" Sep 29 17:08:47 crc kubenswrapper[4592]: I0929 17:08:47.147859 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpcd5\" (UniqueName: \"kubernetes.io/projected/64a1fa2f-51dd-4755-8823-93be4cbbf71c-kube-api-access-hpcd5\") pod \"glance-415a-account-create-8r6tt\" (UID: \"64a1fa2f-51dd-4755-8823-93be4cbbf71c\") " pod="openstack/glance-415a-account-create-8r6tt" Sep 29 17:08:47 crc kubenswrapper[4592]: I0929 17:08:47.167765 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpcd5\" (UniqueName: \"kubernetes.io/projected/64a1fa2f-51dd-4755-8823-93be4cbbf71c-kube-api-access-hpcd5\") pod \"glance-415a-account-create-8r6tt\" (UID: \"64a1fa2f-51dd-4755-8823-93be4cbbf71c\") " pod="openstack/glance-415a-account-create-8r6tt" Sep 29 17:08:47 crc kubenswrapper[4592]: I0929 17:08:47.266475 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-415a-account-create-8r6tt" Sep 29 17:08:47 crc kubenswrapper[4592]: I0929 17:08:47.922554 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-jfzwf" podUID="d813cc31-c8ba-48c0-b523-3d2b3fbc3341" containerName="ovn-controller" probeResult="failure" output=< Sep 29 17:08:47 crc kubenswrapper[4592]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Sep 29 17:08:47 crc kubenswrapper[4592]: > Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.188090 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="62319168-243a-4613-a565-d864d75110e2" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.252135 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-7x4wp" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.451417 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jfzwf-config-tvgpf"] Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.452419 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.455168 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.465527 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jfzwf-config-tvgpf"] Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.566950 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="a9840d1a-98b1-4ff7-9140-d21bacc11b0a" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.572207 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-run\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.572296 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0a0bf905-6b6a-488b-b88c-5638885c540d-additional-scripts\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.572495 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-log-ovn\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.572585 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a0bf905-6b6a-488b-b88c-5638885c540d-scripts\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.572619 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tl7lm\" (UniqueName: \"kubernetes.io/projected/0a0bf905-6b6a-488b-b88c-5638885c540d-kube-api-access-tl7lm\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.572685 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-run-ovn\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.674676 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-log-ovn\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: 
\"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.674730 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a0bf905-6b6a-488b-b88c-5638885c540d-scripts\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.674746 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tl7lm\" (UniqueName: \"kubernetes.io/projected/0a0bf905-6b6a-488b-b88c-5638885c540d-kube-api-access-tl7lm\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.674773 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-run-ovn\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.674855 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-run\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.674898 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0a0bf905-6b6a-488b-b88c-5638885c540d-additional-scripts\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.675435 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-run\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.675491 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-log-ovn\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.675491 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-run-ovn\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.675861 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0a0bf905-6b6a-488b-b88c-5638885c540d-additional-scripts\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: 
\"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.677097 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a0bf905-6b6a-488b-b88c-5638885c540d-scripts\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.699273 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tl7lm\" (UniqueName: \"kubernetes.io/projected/0a0bf905-6b6a-488b-b88c-5638885c540d-kube-api-access-tl7lm\") pod \"ovn-controller-jfzwf-config-tvgpf\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:48 crc kubenswrapper[4592]: I0929 17:08:48.774748 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:49 crc kubenswrapper[4592]: I0929 17:08:49.277461 4592 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod436f5852-e2a7-4374-ad72-e9f4c63a046b"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod436f5852-e2a7-4374-ad72-e9f4c63a046b] : Timed out while waiting for systemd to remove kubepods-besteffort-pod436f5852_e2a7_4374_ad72_e9f4c63a046b.slice" Sep 29 17:08:49 crc kubenswrapper[4592]: E0929 17:08:49.277734 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod436f5852-e2a7-4374-ad72-e9f4c63a046b] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod436f5852-e2a7-4374-ad72-e9f4c63a046b] : Timed out while waiting for systemd to remove kubepods-besteffort-pod436f5852_e2a7_4374_ad72_e9f4c63a046b.slice" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq" podUID="436f5852-e2a7-4374-ad72-e9f4c63a046b" Sep 29 17:08:49 crc kubenswrapper[4592]: I0929 17:08:49.533200 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-qq7dq" Sep 29 17:08:49 crc kubenswrapper[4592]: I0929 17:08:49.533189 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"a16c7eb0664f8a5a1a327acf67be583cd527cb9dc112ca8b8edf8a49738e9de0"} Sep 29 17:08:49 crc kubenswrapper[4592]: I0929 17:08:49.569439 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-415a-account-create-8r6tt"] Sep 29 17:08:49 crc kubenswrapper[4592]: I0929 17:08:49.586237 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jfzwf-config-tvgpf"] Sep 29 17:08:49 crc kubenswrapper[4592]: I0929 17:08:49.618281 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qq7dq"] Sep 29 17:08:49 crc kubenswrapper[4592]: I0929 17:08:49.625708 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-qq7dq"] Sep 29 17:08:50 crc kubenswrapper[4592]: I0929 17:08:50.542486 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"da4ec354773a4f1ea44a454d0ae5b18739686b4c18f30a3b5973f0a8f6b83cd4"} Sep 29 17:08:50 crc kubenswrapper[4592]: I0929 17:08:50.543716 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"9b44915a55fc703bee40e0ebd0241491820aa7cd0e575d822e50e062125c3352"} Sep 29 17:08:50 crc kubenswrapper[4592]: I0929 17:08:50.543967 4592 generic.go:334] "Generic (PLEG): container finished" podID="64a1fa2f-51dd-4755-8823-93be4cbbf71c" containerID="2d7a529c3d95d1440c07ec0f0f2914651b3cb75d784490aa13082f457665d33c" exitCode=0 Sep 29 17:08:50 crc kubenswrapper[4592]: I0929 17:08:50.544022 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-415a-account-create-8r6tt" event={"ID":"64a1fa2f-51dd-4755-8823-93be4cbbf71c","Type":"ContainerDied","Data":"2d7a529c3d95d1440c07ec0f0f2914651b3cb75d784490aa13082f457665d33c"} Sep 29 17:08:50 crc kubenswrapper[4592]: I0929 17:08:50.544240 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-415a-account-create-8r6tt" event={"ID":"64a1fa2f-51dd-4755-8823-93be4cbbf71c","Type":"ContainerStarted","Data":"5e5639e136194351ef6c5d203df7b86056070d4084223dccf45b3c198902c732"} Sep 29 17:08:50 crc kubenswrapper[4592]: I0929 17:08:50.545551 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jfzwf-config-tvgpf" event={"ID":"0a0bf905-6b6a-488b-b88c-5638885c540d","Type":"ContainerStarted","Data":"799499baf640565a941a7359236338472fdac23e1ea3e20950cdba5240f415c2"} Sep 29 17:08:50 crc kubenswrapper[4592]: I0929 17:08:50.545605 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jfzwf-config-tvgpf" event={"ID":"0a0bf905-6b6a-488b-b88c-5638885c540d","Type":"ContainerStarted","Data":"043b90c629d7c30ad97410c991947035acba5c5e61c498c979fdcc6a226acb64"} Sep 29 17:08:50 crc kubenswrapper[4592]: I0929 17:08:50.581485 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-jfzwf-config-tvgpf" podStartSLOduration=2.581468783 podStartE2EDuration="2.581468783s" podCreationTimestamp="2025-09-29 17:08:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:08:50.574733491 +0000 UTC m=+1060.722511192" watchObservedRunningTime="2025-09-29 17:08:50.581468783 +0000 UTC m=+1060.729246464" Sep 29 17:08:51 crc kubenswrapper[4592]: I0929 17:08:51.206726 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="436f5852-e2a7-4374-ad72-e9f4c63a046b" path="/var/lib/kubelet/pods/436f5852-e2a7-4374-ad72-e9f4c63a046b/volumes" Sep 29 17:08:51 crc kubenswrapper[4592]: I0929 17:08:51.560553 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"232fb95501102cb1e5075be32e9b1579a735d5de2f0641afab4755cf413817e5"} Sep 29 17:08:51 crc kubenswrapper[4592]: I0929 17:08:51.563486 4592 generic.go:334] "Generic (PLEG): container finished" podID="0a0bf905-6b6a-488b-b88c-5638885c540d" containerID="799499baf640565a941a7359236338472fdac23e1ea3e20950cdba5240f415c2" exitCode=0 Sep 29 17:08:51 crc kubenswrapper[4592]: I0929 17:08:51.563574 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jfzwf-config-tvgpf" event={"ID":"0a0bf905-6b6a-488b-b88c-5638885c540d","Type":"ContainerDied","Data":"799499baf640565a941a7359236338472fdac23e1ea3e20950cdba5240f415c2"} Sep 29 17:08:51 crc kubenswrapper[4592]: I0929 17:08:51.890133 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-415a-account-create-8r6tt" Sep 29 17:08:52 crc kubenswrapper[4592]: I0929 17:08:52.042846 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpcd5\" (UniqueName: \"kubernetes.io/projected/64a1fa2f-51dd-4755-8823-93be4cbbf71c-kube-api-access-hpcd5\") pod \"64a1fa2f-51dd-4755-8823-93be4cbbf71c\" (UID: \"64a1fa2f-51dd-4755-8823-93be4cbbf71c\") " Sep 29 17:08:52 crc kubenswrapper[4592]: I0929 17:08:52.048462 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64a1fa2f-51dd-4755-8823-93be4cbbf71c-kube-api-access-hpcd5" (OuterVolumeSpecName: "kube-api-access-hpcd5") pod "64a1fa2f-51dd-4755-8823-93be4cbbf71c" (UID: "64a1fa2f-51dd-4755-8823-93be4cbbf71c"). InnerVolumeSpecName "kube-api-access-hpcd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:52 crc kubenswrapper[4592]: I0929 17:08:52.144424 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpcd5\" (UniqueName: \"kubernetes.io/projected/64a1fa2f-51dd-4755-8823-93be4cbbf71c-kube-api-access-hpcd5\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:52 crc kubenswrapper[4592]: I0929 17:08:52.584701 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-415a-account-create-8r6tt" event={"ID":"64a1fa2f-51dd-4755-8823-93be4cbbf71c","Type":"ContainerDied","Data":"5e5639e136194351ef6c5d203df7b86056070d4084223dccf45b3c198902c732"} Sep 29 17:08:52 crc kubenswrapper[4592]: I0929 17:08:52.585136 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e5639e136194351ef6c5d203df7b86056070d4084223dccf45b3c198902c732" Sep 29 17:08:52 crc kubenswrapper[4592]: I0929 17:08:52.585106 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-415a-account-create-8r6tt" Sep 29 17:08:52 crc kubenswrapper[4592]: I0929 17:08:52.955918 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-jfzwf" Sep 29 17:08:52 crc kubenswrapper[4592]: I0929 17:08:52.979237 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.161409 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tl7lm\" (UniqueName: \"kubernetes.io/projected/0a0bf905-6b6a-488b-b88c-5638885c540d-kube-api-access-tl7lm\") pod \"0a0bf905-6b6a-488b-b88c-5638885c540d\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.161507 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-run-ovn\") pod \"0a0bf905-6b6a-488b-b88c-5638885c540d\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.161530 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-log-ovn\") pod \"0a0bf905-6b6a-488b-b88c-5638885c540d\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.161554 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-run\") pod \"0a0bf905-6b6a-488b-b88c-5638885c540d\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.161607 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0a0bf905-6b6a-488b-b88c-5638885c540d-additional-scripts\") pod \"0a0bf905-6b6a-488b-b88c-5638885c540d\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.161641 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a0bf905-6b6a-488b-b88c-5638885c540d-scripts\") pod \"0a0bf905-6b6a-488b-b88c-5638885c540d\" (UID: \"0a0bf905-6b6a-488b-b88c-5638885c540d\") " Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.161604 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "0a0bf905-6b6a-488b-b88c-5638885c540d" (UID: "0a0bf905-6b6a-488b-b88c-5638885c540d"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.161644 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "0a0bf905-6b6a-488b-b88c-5638885c540d" (UID: "0a0bf905-6b6a-488b-b88c-5638885c540d"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.161879 4592 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.161892 4592 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.162552 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a0bf905-6b6a-488b-b88c-5638885c540d-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "0a0bf905-6b6a-488b-b88c-5638885c540d" (UID: "0a0bf905-6b6a-488b-b88c-5638885c540d"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.162631 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-run" (OuterVolumeSpecName: "var-run") pod "0a0bf905-6b6a-488b-b88c-5638885c540d" (UID: "0a0bf905-6b6a-488b-b88c-5638885c540d"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.163741 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a0bf905-6b6a-488b-b88c-5638885c540d-scripts" (OuterVolumeSpecName: "scripts") pod "0a0bf905-6b6a-488b-b88c-5638885c540d" (UID: "0a0bf905-6b6a-488b-b88c-5638885c540d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.167340 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a0bf905-6b6a-488b-b88c-5638885c540d-kube-api-access-tl7lm" (OuterVolumeSpecName: "kube-api-access-tl7lm") pod "0a0bf905-6b6a-488b-b88c-5638885c540d" (UID: "0a0bf905-6b6a-488b-b88c-5638885c540d"). InnerVolumeSpecName "kube-api-access-tl7lm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.263960 4592 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0a0bf905-6b6a-488b-b88c-5638885c540d-var-run\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.263985 4592 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/0a0bf905-6b6a-488b-b88c-5638885c540d-additional-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.263996 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a0bf905-6b6a-488b-b88c-5638885c540d-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.264005 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tl7lm\" (UniqueName: \"kubernetes.io/projected/0a0bf905-6b6a-488b-b88c-5638885c540d-kube-api-access-tl7lm\") on node \"crc\" DevicePath \"\"" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.593724 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jfzwf-config-tvgpf" event={"ID":"0a0bf905-6b6a-488b-b88c-5638885c540d","Type":"ContainerDied","Data":"043b90c629d7c30ad97410c991947035acba5c5e61c498c979fdcc6a226acb64"} Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.593765 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="043b90c629d7c30ad97410c991947035acba5c5e61c498c979fdcc6a226acb64" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.593819 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jfzwf-config-tvgpf" Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.672150 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jfzwf-config-tvgpf"] Sep 29 17:08:53 crc kubenswrapper[4592]: I0929 17:08:53.680508 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jfzwf-config-tvgpf"] Sep 29 17:08:54 crc kubenswrapper[4592]: I0929 17:08:54.607224 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"f57a283855778a1a514a4cf532f5ecab7a950c72f0b32c07604a3943cb0bc2d2"} Sep 29 17:08:54 crc kubenswrapper[4592]: I0929 17:08:54.608020 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"da1dab1b92884e87791dcc2335008ee06d47b6d8e94b377234205f80031f33b5"} Sep 29 17:08:54 crc kubenswrapper[4592]: I0929 17:08:54.608098 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"70fabb409bf06f105f0d57449a090acf1faed39737b333a79a40b07bdca07624"} Sep 29 17:08:54 crc kubenswrapper[4592]: I0929 17:08:54.608201 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"7c7bbcba82544310187bbae42f3c70720c7930363ad53c598c217d24b55af457"} Sep 29 17:08:55 crc kubenswrapper[4592]: I0929 17:08:55.198843 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="0a0bf905-6b6a-488b-b88c-5638885c540d" path="/var/lib/kubelet/pods/0a0bf905-6b6a-488b-b88c-5638885c540d/volumes" Sep 29 17:08:56 crc kubenswrapper[4592]: I0929 17:08:56.629420 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"f7f1ba6adbd29a185a64af26b2bedaf8040a8f4180ab0d87f746f81299321567"} Sep 29 17:08:56 crc kubenswrapper[4592]: I0929 17:08:56.629945 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"996fc5fd4b69f20526d5720b70a1e82ba7ad63046395d9142eb735e1f500140b"} Sep 29 17:08:56 crc kubenswrapper[4592]: I0929 17:08:56.629961 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"52d95dd5a1d65a8a53bd34dc7b88fa45ccc3ddd602dc3a0043763e7bf8005186"} Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.092184 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-9gcq7"] Sep 29 17:08:57 crc kubenswrapper[4592]: E0929 17:08:57.092535 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64a1fa2f-51dd-4755-8823-93be4cbbf71c" containerName="mariadb-account-create" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.092550 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="64a1fa2f-51dd-4755-8823-93be4cbbf71c" containerName="mariadb-account-create" Sep 29 17:08:57 crc kubenswrapper[4592]: E0929 17:08:57.092565 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a0bf905-6b6a-488b-b88c-5638885c540d" containerName="ovn-config" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.092571 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a0bf905-6b6a-488b-b88c-5638885c540d" containerName="ovn-config" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.092729 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="64a1fa2f-51dd-4755-8823-93be4cbbf71c" containerName="mariadb-account-create" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.092765 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a0bf905-6b6a-488b-b88c-5638885c540d" containerName="ovn-config" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.093254 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.098225 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.101415 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-f6zww" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.110556 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-9gcq7"] Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.240194 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-config-data\") pod \"glance-db-sync-9gcq7\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.240270 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-combined-ca-bundle\") pod \"glance-db-sync-9gcq7\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.240413 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-db-sync-config-data\") pod \"glance-db-sync-9gcq7\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.240537 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpmc4\" (UniqueName: \"kubernetes.io/projected/f760ecfd-a454-4a77-89c3-0703ea63c515-kube-api-access-jpmc4\") pod \"glance-db-sync-9gcq7\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.342399 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-config-data\") pod \"glance-db-sync-9gcq7\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.342488 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-combined-ca-bundle\") pod \"glance-db-sync-9gcq7\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.342530 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-db-sync-config-data\") pod \"glance-db-sync-9gcq7\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.342586 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpmc4\" (UniqueName: \"kubernetes.io/projected/f760ecfd-a454-4a77-89c3-0703ea63c515-kube-api-access-jpmc4\") pod 
\"glance-db-sync-9gcq7\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.348627 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-config-data\") pod \"glance-db-sync-9gcq7\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.349645 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-db-sync-config-data\") pod \"glance-db-sync-9gcq7\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.353005 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-combined-ca-bundle\") pod \"glance-db-sync-9gcq7\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.360980 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpmc4\" (UniqueName: \"kubernetes.io/projected/f760ecfd-a454-4a77-89c3-0703ea63c515-kube-api-access-jpmc4\") pod \"glance-db-sync-9gcq7\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.411368 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-9gcq7" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.650910 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"b1537d71a158b0817aa65a5c21fb7b73bc77033195efca2ab4ef27899a25b422"} Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.650980 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"6e7496b02159edac6fc670dac23552bf3132d0c01a01fdbacb6300acec52b3e2"} Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.650994 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"81cabc59d9e22ce4d5670bdbeae797f052396efbff6ccbd7c5a7fde90a3d153f"} Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.651031 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f64e72d-c39e-45fa-b3df-ae8624976e86","Type":"ContainerStarted","Data":"96c2ff4b63a53402f4d73c72bb508a25c43b03c7865e51c0ce8a8754fb667dd2"} Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.728034 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=20.282422181 podStartE2EDuration="33.72801914s" podCreationTimestamp="2025-09-29 17:08:24 +0000 UTC" firstStartedPulling="2025-09-29 17:08:42.400997035 +0000 UTC m=+1052.548774716" lastFinishedPulling="2025-09-29 17:08:55.846593994 +0000 UTC m=+1065.994371675" observedRunningTime="2025-09-29 17:08:57.723879969 +0000 UTC m=+1067.871657650" watchObservedRunningTime="2025-09-29 
17:08:57.72801914 +0000 UTC m=+1067.875796821" Sep 29 17:08:57 crc kubenswrapper[4592]: I0929 17:08:57.820846 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-9gcq7"] Sep 29 17:08:57 crc kubenswrapper[4592]: W0929 17:08:57.826968 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf760ecfd_a454_4a77_89c3_0703ea63c515.slice/crio-ac6d13fcf1163dd7a318e1c5e677496296390cc9c653a916325193838c81da48 WatchSource:0}: Error finding container ac6d13fcf1163dd7a318e1c5e677496296390cc9c653a916325193838c81da48: Status 404 returned error can't find the container with id ac6d13fcf1163dd7a318e1c5e677496296390cc9c653a916325193838c81da48 Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.002169 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-n87jh"] Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.003476 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.005610 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.015392 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-n87jh"] Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.053795 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-config\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.053926 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.054011 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.054036 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.054065 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kphg\" (UniqueName: \"kubernetes.io/projected/ac859b1e-0413-4d50-ae61-ddc342af6877-kube-api-access-6kphg\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.054087 4592 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.156087 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.156184 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.156213 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kphg\" (UniqueName: \"kubernetes.io/projected/ac859b1e-0413-4d50-ae61-ddc342af6877-kube-api-access-6kphg\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.156237 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.156311 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-config\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.156384 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.157898 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.157927 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.158003 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.158356 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.159234 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-config\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.179315 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kphg\" (UniqueName: \"kubernetes.io/projected/ac859b1e-0413-4d50-ae61-ddc342af6877-kube-api-access-6kphg\") pod \"dnsmasq-dns-6d5b6d6b67-n87jh\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.188762 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.321326 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.527261 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-pnxjz"] Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.528257 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-pnxjz" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.549230 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-pnxjz"] Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.576360 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.666915 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9gcq7" event={"ID":"f760ecfd-a454-4a77-89c3-0703ea63c515","Type":"ContainerStarted","Data":"ac6d13fcf1163dd7a318e1c5e677496296390cc9c653a916325193838c81da48"} Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.684087 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlnws\" (UniqueName: \"kubernetes.io/projected/3797e303-3118-4d4b-a6da-78e5737d8fcc-kube-api-access-hlnws\") pod \"cinder-db-create-pnxjz\" (UID: \"3797e303-3118-4d4b-a6da-78e5737d8fcc\") " pod="openstack/cinder-db-create-pnxjz" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.737900 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-kp4jw"] Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.738863 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-kp4jw" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.786332 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlnws\" (UniqueName: \"kubernetes.io/projected/3797e303-3118-4d4b-a6da-78e5737d8fcc-kube-api-access-hlnws\") pod \"cinder-db-create-pnxjz\" (UID: \"3797e303-3118-4d4b-a6da-78e5737d8fcc\") " pod="openstack/cinder-db-create-pnxjz" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.790699 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-kp4jw"] Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.836835 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlnws\" (UniqueName: \"kubernetes.io/projected/3797e303-3118-4d4b-a6da-78e5737d8fcc-kube-api-access-hlnws\") pod \"cinder-db-create-pnxjz\" (UID: \"3797e303-3118-4d4b-a6da-78e5737d8fcc\") " pod="openstack/cinder-db-create-pnxjz" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.854131 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-pnxjz" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.887750 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsv9v\" (UniqueName: \"kubernetes.io/projected/0be0b236-d9e2-4cc1-88dd-264436334ae2-kube-api-access-tsv9v\") pod \"barbican-db-create-kp4jw\" (UID: \"0be0b236-d9e2-4cc1-88dd-264436334ae2\") " pod="openstack/barbican-db-create-kp4jw" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.962472 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-h9l7h"] Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.963537 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-h9l7h" Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.978409 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-h9l7h"] Sep 29 17:08:58 crc kubenswrapper[4592]: I0929 17:08:58.991582 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsv9v\" (UniqueName: \"kubernetes.io/projected/0be0b236-d9e2-4cc1-88dd-264436334ae2-kube-api-access-tsv9v\") pod \"barbican-db-create-kp4jw\" (UID: \"0be0b236-d9e2-4cc1-88dd-264436334ae2\") " pod="openstack/barbican-db-create-kp4jw" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.025582 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsv9v\" (UniqueName: \"kubernetes.io/projected/0be0b236-d9e2-4cc1-88dd-264436334ae2-kube-api-access-tsv9v\") pod \"barbican-db-create-kp4jw\" (UID: \"0be0b236-d9e2-4cc1-88dd-264436334ae2\") " pod="openstack/barbican-db-create-kp4jw" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.070725 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-kp4jw" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.096474 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2drf8\" (UniqueName: \"kubernetes.io/projected/2e5e30f6-50a2-4615-97a4-8e666b41c54a-kube-api-access-2drf8\") pod \"neutron-db-create-h9l7h\" (UID: \"2e5e30f6-50a2-4615-97a4-8e666b41c54a\") " pod="openstack/neutron-db-create-h9l7h" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.139639 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-n87jh"] Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.199649 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2drf8\" (UniqueName: \"kubernetes.io/projected/2e5e30f6-50a2-4615-97a4-8e666b41c54a-kube-api-access-2drf8\") pod \"neutron-db-create-h9l7h\" (UID: \"2e5e30f6-50a2-4615-97a4-8e666b41c54a\") " pod="openstack/neutron-db-create-h9l7h" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.232321 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-rplcj"] Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.233316 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-rplcj" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.238844 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.238966 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.238844 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-wm4xb" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.245732 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.281693 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-rplcj"] Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.300769 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94891c4d-d21e-419f-9ca9-ce48bef5b069-config-data\") pod \"keystone-db-sync-rplcj\" (UID: \"94891c4d-d21e-419f-9ca9-ce48bef5b069\") " pod="openstack/keystone-db-sync-rplcj" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.301099 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94891c4d-d21e-419f-9ca9-ce48bef5b069-combined-ca-bundle\") pod \"keystone-db-sync-rplcj\" (UID: \"94891c4d-d21e-419f-9ca9-ce48bef5b069\") " pod="openstack/keystone-db-sync-rplcj" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.301507 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmnd7\" (UniqueName: \"kubernetes.io/projected/94891c4d-d21e-419f-9ca9-ce48bef5b069-kube-api-access-kmnd7\") pod \"keystone-db-sync-rplcj\" (UID: \"94891c4d-d21e-419f-9ca9-ce48bef5b069\") " pod="openstack/keystone-db-sync-rplcj" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.307589 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2drf8\" 
(UniqueName: \"kubernetes.io/projected/2e5e30f6-50a2-4615-97a4-8e666b41c54a-kube-api-access-2drf8\") pod \"neutron-db-create-h9l7h\" (UID: \"2e5e30f6-50a2-4615-97a4-8e666b41c54a\") " pod="openstack/neutron-db-create-h9l7h" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.403076 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94891c4d-d21e-419f-9ca9-ce48bef5b069-combined-ca-bundle\") pod \"keystone-db-sync-rplcj\" (UID: \"94891c4d-d21e-419f-9ca9-ce48bef5b069\") " pod="openstack/keystone-db-sync-rplcj" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.403250 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmnd7\" (UniqueName: \"kubernetes.io/projected/94891c4d-d21e-419f-9ca9-ce48bef5b069-kube-api-access-kmnd7\") pod \"keystone-db-sync-rplcj\" (UID: \"94891c4d-d21e-419f-9ca9-ce48bef5b069\") " pod="openstack/keystone-db-sync-rplcj" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.403287 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94891c4d-d21e-419f-9ca9-ce48bef5b069-config-data\") pod \"keystone-db-sync-rplcj\" (UID: \"94891c4d-d21e-419f-9ca9-ce48bef5b069\") " pod="openstack/keystone-db-sync-rplcj" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.406579 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94891c4d-d21e-419f-9ca9-ce48bef5b069-combined-ca-bundle\") pod \"keystone-db-sync-rplcj\" (UID: \"94891c4d-d21e-419f-9ca9-ce48bef5b069\") " pod="openstack/keystone-db-sync-rplcj" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.419870 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94891c4d-d21e-419f-9ca9-ce48bef5b069-config-data\") pod \"keystone-db-sync-rplcj\" (UID: \"94891c4d-d21e-419f-9ca9-ce48bef5b069\") " pod="openstack/keystone-db-sync-rplcj" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.427751 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmnd7\" (UniqueName: \"kubernetes.io/projected/94891c4d-d21e-419f-9ca9-ce48bef5b069-kube-api-access-kmnd7\") pod \"keystone-db-sync-rplcj\" (UID: \"94891c4d-d21e-419f-9ca9-ce48bef5b069\") " pod="openstack/keystone-db-sync-rplcj" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.566043 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-rplcj" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.606904 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-h9l7h" Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.696237 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" event={"ID":"ac859b1e-0413-4d50-ae61-ddc342af6877","Type":"ContainerStarted","Data":"e4ddb2739270f099679bf687e231ffd29b2b7527eed009750f9ba2f2fedcdefc"} Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.745051 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-pnxjz"] Sep 29 17:08:59 crc kubenswrapper[4592]: I0929 17:08:59.832195 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-kp4jw"] Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.018119 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-rplcj"] Sep 29 17:09:00 crc kubenswrapper[4592]: W0929 17:09:00.063550 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94891c4d_d21e_419f_9ca9_ce48bef5b069.slice/crio-e1281e98430acaf3f3598c781c5f69249d015e17d623a0d573cd7fef0b4d3731 WatchSource:0}: Error finding container e1281e98430acaf3f3598c781c5f69249d015e17d623a0d573cd7fef0b4d3731: Status 404 returned error can't find the container with id e1281e98430acaf3f3598c781c5f69249d015e17d623a0d573cd7fef0b4d3731 Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.152085 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-h9l7h"] Sep 29 17:09:00 crc kubenswrapper[4592]: W0929 17:09:00.209361 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e5e30f6_50a2_4615_97a4_8e666b41c54a.slice/crio-399393fa21954b28a761c3c8d0e02bcf8b3c73a140af9c3455d83feea6a23469 WatchSource:0}: Error finding container 399393fa21954b28a761c3c8d0e02bcf8b3c73a140af9c3455d83feea6a23469: Status 404 returned error can't find the container with id 399393fa21954b28a761c3c8d0e02bcf8b3c73a140af9c3455d83feea6a23469 Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.716578 4592 generic.go:334] "Generic (PLEG): container finished" podID="2e5e30f6-50a2-4615-97a4-8e666b41c54a" containerID="f5b6b298c1c03bfc8bfd076589e1bd4232cf6bc7608e520b6a2c613465c57455" exitCode=0 Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.717847 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-h9l7h" event={"ID":"2e5e30f6-50a2-4615-97a4-8e666b41c54a","Type":"ContainerDied","Data":"f5b6b298c1c03bfc8bfd076589e1bd4232cf6bc7608e520b6a2c613465c57455"} Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.718887 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-h9l7h" event={"ID":"2e5e30f6-50a2-4615-97a4-8e666b41c54a","Type":"ContainerStarted","Data":"399393fa21954b28a761c3c8d0e02bcf8b3c73a140af9c3455d83feea6a23469"} Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.724773 4592 generic.go:334] "Generic (PLEG): container finished" podID="ac859b1e-0413-4d50-ae61-ddc342af6877" containerID="0be83540b9bf9ecddfc33e1ab6a5408b79dc91741ab99d8c81d5c95d4512da60" exitCode=0 Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.724835 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" event={"ID":"ac859b1e-0413-4d50-ae61-ddc342af6877","Type":"ContainerDied","Data":"0be83540b9bf9ecddfc33e1ab6a5408b79dc91741ab99d8c81d5c95d4512da60"} Sep 29 17:09:00 
crc kubenswrapper[4592]: I0929 17:09:00.732455 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-rplcj" event={"ID":"94891c4d-d21e-419f-9ca9-ce48bef5b069","Type":"ContainerStarted","Data":"e1281e98430acaf3f3598c781c5f69249d015e17d623a0d573cd7fef0b4d3731"} Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.742656 4592 generic.go:334] "Generic (PLEG): container finished" podID="0be0b236-d9e2-4cc1-88dd-264436334ae2" containerID="6ded64817cfe114aa58af543f1922a0811bd7f68a09f6190aac6c5885cb90b26" exitCode=0 Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.742722 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-kp4jw" event={"ID":"0be0b236-d9e2-4cc1-88dd-264436334ae2","Type":"ContainerDied","Data":"6ded64817cfe114aa58af543f1922a0811bd7f68a09f6190aac6c5885cb90b26"} Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.742749 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-kp4jw" event={"ID":"0be0b236-d9e2-4cc1-88dd-264436334ae2","Type":"ContainerStarted","Data":"3029522e5ce981ce0c8a2c7fbbaa8002e78b3b65c4281993598772fc3f593fd2"} Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.744650 4592 generic.go:334] "Generic (PLEG): container finished" podID="3797e303-3118-4d4b-a6da-78e5737d8fcc" containerID="d9e83a97ef8961a128edc1281cc64d6f08aa209746f0adb5b448891ca16bb9a0" exitCode=0 Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.744685 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-pnxjz" event={"ID":"3797e303-3118-4d4b-a6da-78e5737d8fcc","Type":"ContainerDied","Data":"d9e83a97ef8961a128edc1281cc64d6f08aa209746f0adb5b448891ca16bb9a0"} Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.744708 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-pnxjz" event={"ID":"3797e303-3118-4d4b-a6da-78e5737d8fcc","Type":"ContainerStarted","Data":"740c38a538fca640454299f837a419ee5e3eccd12cc3f3d47d8d4c2121e37404"} Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.882877 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.882973 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.883013 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.883760 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d3bcef6cdb62fe4e0e330bc04d7fcf2a1a90ac24ed21caa15b239bee09c268e1"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 17:09:00 crc kubenswrapper[4592]: I0929 17:09:00.883829 4592 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://d3bcef6cdb62fe4e0e330bc04d7fcf2a1a90ac24ed21caa15b239bee09c268e1" gracePeriod=600 Sep 29 17:09:01 crc kubenswrapper[4592]: I0929 17:09:01.760612 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="d3bcef6cdb62fe4e0e330bc04d7fcf2a1a90ac24ed21caa15b239bee09c268e1" exitCode=0 Sep 29 17:09:01 crc kubenswrapper[4592]: I0929 17:09:01.760672 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"d3bcef6cdb62fe4e0e330bc04d7fcf2a1a90ac24ed21caa15b239bee09c268e1"} Sep 29 17:09:01 crc kubenswrapper[4592]: I0929 17:09:01.761270 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"eda311cdba216e737acbcd0597b515cd95b73924e8324b693474a342758766fb"} Sep 29 17:09:01 crc kubenswrapper[4592]: I0929 17:09:01.761292 4592 scope.go:117] "RemoveContainer" containerID="c27ed29df7ad1d8fe01e00a1b4d9831c1ed68234be9201ca2428b03bb210eaae" Sep 29 17:09:01 crc kubenswrapper[4592]: I0929 17:09:01.767732 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" event={"ID":"ac859b1e-0413-4d50-ae61-ddc342af6877","Type":"ContainerStarted","Data":"0d29eeaedce4647b7557c90e28ef940daf142fb9d1c2e55bc2c8f1cd3d8f1d97"} Sep 29 17:09:01 crc kubenswrapper[4592]: I0929 17:09:01.767780 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:09:01 crc kubenswrapper[4592]: I0929 17:09:01.825944 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" podStartSLOduration=4.825922739 podStartE2EDuration="4.825922739s" podCreationTimestamp="2025-09-29 17:08:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:09:01.810365405 +0000 UTC m=+1071.958143086" watchObservedRunningTime="2025-09-29 17:09:01.825922739 +0000 UTC m=+1071.973700420" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.267269 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-pnxjz" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.382109 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlnws\" (UniqueName: \"kubernetes.io/projected/3797e303-3118-4d4b-a6da-78e5737d8fcc-kube-api-access-hlnws\") pod \"3797e303-3118-4d4b-a6da-78e5737d8fcc\" (UID: \"3797e303-3118-4d4b-a6da-78e5737d8fcc\") " Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.405479 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3797e303-3118-4d4b-a6da-78e5737d8fcc-kube-api-access-hlnws" (OuterVolumeSpecName: "kube-api-access-hlnws") pod "3797e303-3118-4d4b-a6da-78e5737d8fcc" (UID: "3797e303-3118-4d4b-a6da-78e5737d8fcc"). InnerVolumeSpecName "kube-api-access-hlnws". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.488643 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlnws\" (UniqueName: \"kubernetes.io/projected/3797e303-3118-4d4b-a6da-78e5737d8fcc-kube-api-access-hlnws\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.529028 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-kp4jw" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.534084 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-h9l7h" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.691123 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2drf8\" (UniqueName: \"kubernetes.io/projected/2e5e30f6-50a2-4615-97a4-8e666b41c54a-kube-api-access-2drf8\") pod \"2e5e30f6-50a2-4615-97a4-8e666b41c54a\" (UID: \"2e5e30f6-50a2-4615-97a4-8e666b41c54a\") " Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.691197 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsv9v\" (UniqueName: \"kubernetes.io/projected/0be0b236-d9e2-4cc1-88dd-264436334ae2-kube-api-access-tsv9v\") pod \"0be0b236-d9e2-4cc1-88dd-264436334ae2\" (UID: \"0be0b236-d9e2-4cc1-88dd-264436334ae2\") " Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.695410 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0be0b236-d9e2-4cc1-88dd-264436334ae2-kube-api-access-tsv9v" (OuterVolumeSpecName: "kube-api-access-tsv9v") pod "0be0b236-d9e2-4cc1-88dd-264436334ae2" (UID: "0be0b236-d9e2-4cc1-88dd-264436334ae2"). InnerVolumeSpecName "kube-api-access-tsv9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.696466 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e5e30f6-50a2-4615-97a4-8e666b41c54a-kube-api-access-2drf8" (OuterVolumeSpecName: "kube-api-access-2drf8") pod "2e5e30f6-50a2-4615-97a4-8e666b41c54a" (UID: "2e5e30f6-50a2-4615-97a4-8e666b41c54a"). InnerVolumeSpecName "kube-api-access-2drf8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.798838 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2drf8\" (UniqueName: \"kubernetes.io/projected/2e5e30f6-50a2-4615-97a4-8e666b41c54a-kube-api-access-2drf8\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.798887 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsv9v\" (UniqueName: \"kubernetes.io/projected/0be0b236-d9e2-4cc1-88dd-264436334ae2-kube-api-access-tsv9v\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.829282 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-pnxjz" event={"ID":"3797e303-3118-4d4b-a6da-78e5737d8fcc","Type":"ContainerDied","Data":"740c38a538fca640454299f837a419ee5e3eccd12cc3f3d47d8d4c2121e37404"} Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.830391 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="740c38a538fca640454299f837a419ee5e3eccd12cc3f3d47d8d4c2121e37404" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.830588 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-pnxjz" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.838977 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-h9l7h" event={"ID":"2e5e30f6-50a2-4615-97a4-8e666b41c54a","Type":"ContainerDied","Data":"399393fa21954b28a761c3c8d0e02bcf8b3c73a140af9c3455d83feea6a23469"} Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.839023 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="399393fa21954b28a761c3c8d0e02bcf8b3c73a140af9c3455d83feea6a23469" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.839088 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-h9l7h" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.872657 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-kp4jw" Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.873276 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-kp4jw" event={"ID":"0be0b236-d9e2-4cc1-88dd-264436334ae2","Type":"ContainerDied","Data":"3029522e5ce981ce0c8a2c7fbbaa8002e78b3b65c4281993598772fc3f593fd2"} Sep 29 17:09:02 crc kubenswrapper[4592]: I0929 17:09:02.873320 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3029522e5ce981ce0c8a2c7fbbaa8002e78b3b65c4281993598772fc3f593fd2" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.324430 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.381787 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zwcph"] Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.385461 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" podUID="d2c2f732-4572-4064-9379-b627d76b87fd" containerName="dnsmasq-dns" containerID="cri-o://d87ea940166c0e95100aecb6d84240aa857576b1babda02b23e7d9a60f7e4996" gracePeriod=10 Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.774299 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-ccba-account-create-gth5x"] Sep 29 17:09:08 crc kubenswrapper[4592]: E0929 17:09:08.774635 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0be0b236-d9e2-4cc1-88dd-264436334ae2" containerName="mariadb-database-create" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.774650 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="0be0b236-d9e2-4cc1-88dd-264436334ae2" containerName="mariadb-database-create" Sep 29 17:09:08 crc kubenswrapper[4592]: E0929 17:09:08.774672 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3797e303-3118-4d4b-a6da-78e5737d8fcc" containerName="mariadb-database-create" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.774678 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3797e303-3118-4d4b-a6da-78e5737d8fcc" containerName="mariadb-database-create" Sep 29 17:09:08 crc kubenswrapper[4592]: E0929 17:09:08.774701 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e5e30f6-50a2-4615-97a4-8e666b41c54a" containerName="mariadb-database-create" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.774707 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e5e30f6-50a2-4615-97a4-8e666b41c54a" containerName="mariadb-database-create" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.774848 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="0be0b236-d9e2-4cc1-88dd-264436334ae2" containerName="mariadb-database-create" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.774857 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e5e30f6-50a2-4615-97a4-8e666b41c54a" containerName="mariadb-database-create" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.774869 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="3797e303-3118-4d4b-a6da-78e5737d8fcc" containerName="mariadb-database-create" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.775361 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-ccba-account-create-gth5x" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.777315 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.795822 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-ccba-account-create-gth5x"] Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.881857 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-bd20-account-create-w6tjm"] Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.883406 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-bd20-account-create-w6tjm" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.885826 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.899214 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-bd20-account-create-w6tjm"] Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.937465 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c5vs\" (UniqueName: \"kubernetes.io/projected/b6264c6a-bee2-432e-b70c-1afad98e3db5-kube-api-access-2c5vs\") pod \"cinder-ccba-account-create-gth5x\" (UID: \"b6264c6a-bee2-432e-b70c-1afad98e3db5\") " pod="openstack/cinder-ccba-account-create-gth5x" Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.952344 4592 generic.go:334] "Generic (PLEG): container finished" podID="d2c2f732-4572-4064-9379-b627d76b87fd" containerID="d87ea940166c0e95100aecb6d84240aa857576b1babda02b23e7d9a60f7e4996" exitCode=0 Sep 29 17:09:08 crc kubenswrapper[4592]: I0929 17:09:08.952393 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" event={"ID":"d2c2f732-4572-4064-9379-b627d76b87fd","Type":"ContainerDied","Data":"d87ea940166c0e95100aecb6d84240aa857576b1babda02b23e7d9a60f7e4996"} Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.039170 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwztv\" (UniqueName: \"kubernetes.io/projected/7a56497a-4fc4-4b0f-b831-8de37479949b-kube-api-access-qwztv\") pod \"barbican-bd20-account-create-w6tjm\" (UID: \"7a56497a-4fc4-4b0f-b831-8de37479949b\") " pod="openstack/barbican-bd20-account-create-w6tjm" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.039229 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c5vs\" (UniqueName: \"kubernetes.io/projected/b6264c6a-bee2-432e-b70c-1afad98e3db5-kube-api-access-2c5vs\") pod \"cinder-ccba-account-create-gth5x\" (UID: \"b6264c6a-bee2-432e-b70c-1afad98e3db5\") " pod="openstack/cinder-ccba-account-create-gth5x" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.075387 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-f76f-account-create-gzdk8"] Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.076835 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f76f-account-create-gzdk8" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.080609 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.086074 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c5vs\" (UniqueName: \"kubernetes.io/projected/b6264c6a-bee2-432e-b70c-1afad98e3db5-kube-api-access-2c5vs\") pod \"cinder-ccba-account-create-gth5x\" (UID: \"b6264c6a-bee2-432e-b70c-1afad98e3db5\") " pod="openstack/cinder-ccba-account-create-gth5x" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.098018 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-ccba-account-create-gth5x" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.123574 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f76f-account-create-gzdk8"] Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.141242 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwztv\" (UniqueName: \"kubernetes.io/projected/7a56497a-4fc4-4b0f-b831-8de37479949b-kube-api-access-qwztv\") pod \"barbican-bd20-account-create-w6tjm\" (UID: \"7a56497a-4fc4-4b0f-b831-8de37479949b\") " pod="openstack/barbican-bd20-account-create-w6tjm" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.163249 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwztv\" (UniqueName: \"kubernetes.io/projected/7a56497a-4fc4-4b0f-b831-8de37479949b-kube-api-access-qwztv\") pod \"barbican-bd20-account-create-w6tjm\" (UID: \"7a56497a-4fc4-4b0f-b831-8de37479949b\") " pod="openstack/barbican-bd20-account-create-w6tjm" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.242837 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2xw9\" (UniqueName: \"kubernetes.io/projected/14980c16-3c96-4bdc-a271-86ea80fe1ef3-kube-api-access-h2xw9\") pod \"neutron-f76f-account-create-gzdk8\" (UID: \"14980c16-3c96-4bdc-a271-86ea80fe1ef3\") " pod="openstack/neutron-f76f-account-create-gzdk8" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.278619 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-bd20-account-create-w6tjm" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.344541 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2xw9\" (UniqueName: \"kubernetes.io/projected/14980c16-3c96-4bdc-a271-86ea80fe1ef3-kube-api-access-h2xw9\") pod \"neutron-f76f-account-create-gzdk8\" (UID: \"14980c16-3c96-4bdc-a271-86ea80fe1ef3\") " pod="openstack/neutron-f76f-account-create-gzdk8" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.389189 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2xw9\" (UniqueName: \"kubernetes.io/projected/14980c16-3c96-4bdc-a271-86ea80fe1ef3-kube-api-access-h2xw9\") pod \"neutron-f76f-account-create-gzdk8\" (UID: \"14980c16-3c96-4bdc-a271-86ea80fe1ef3\") " pod="openstack/neutron-f76f-account-create-gzdk8" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.423220 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f76f-account-create-gzdk8" Sep 29 17:09:09 crc kubenswrapper[4592]: I0929 17:09:09.565457 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" podUID="d2c2f732-4572-4064-9379-b627d76b87fd" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Sep 29 17:09:14 crc kubenswrapper[4592]: I0929 17:09:14.565487 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" podUID="d2c2f732-4572-4064-9379-b627d76b87fd" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.114:5353: connect: connection refused" Sep 29 17:09:17 crc kubenswrapper[4592]: E0929 17:09:17.908096 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Sep 29 17:09:17 crc kubenswrapper[4592]: E0929 17:09:17.908550 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jpmc4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-9gcq7_openstack(f760ecfd-a454-4a77-89c3-0703ea63c515): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:09:17 crc kubenswrapper[4592]: E0929 17:09:17.910485 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying 
config: context canceled\"" pod="openstack/glance-db-sync-9gcq7" podUID="f760ecfd-a454-4a77-89c3-0703ea63c515" Sep 29 17:09:18 crc kubenswrapper[4592]: E0929 17:09:18.148554 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-9gcq7" podUID="f760ecfd-a454-4a77-89c3-0703ea63c515" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.191561 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.204940 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-dns-svc\") pod \"d2c2f732-4572-4064-9379-b627d76b87fd\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.205020 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkjnm\" (UniqueName: \"kubernetes.io/projected/d2c2f732-4572-4064-9379-b627d76b87fd-kube-api-access-tkjnm\") pod \"d2c2f732-4572-4064-9379-b627d76b87fd\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.205137 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-ovsdbserver-nb\") pod \"d2c2f732-4572-4064-9379-b627d76b87fd\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.210056 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-ovsdbserver-sb\") pod \"d2c2f732-4572-4064-9379-b627d76b87fd\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.210105 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-config\") pod \"d2c2f732-4572-4064-9379-b627d76b87fd\" (UID: \"d2c2f732-4572-4064-9379-b627d76b87fd\") " Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.239496 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2c2f732-4572-4064-9379-b627d76b87fd-kube-api-access-tkjnm" (OuterVolumeSpecName: "kube-api-access-tkjnm") pod "d2c2f732-4572-4064-9379-b627d76b87fd" (UID: "d2c2f732-4572-4064-9379-b627d76b87fd"). InnerVolumeSpecName "kube-api-access-tkjnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.326067 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkjnm\" (UniqueName: \"kubernetes.io/projected/d2c2f732-4572-4064-9379-b627d76b87fd-kube-api-access-tkjnm\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.358007 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d2c2f732-4572-4064-9379-b627d76b87fd" (UID: "d2c2f732-4572-4064-9379-b627d76b87fd"). 
InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.383429 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d2c2f732-4572-4064-9379-b627d76b87fd" (UID: "d2c2f732-4572-4064-9379-b627d76b87fd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.391543 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d2c2f732-4572-4064-9379-b627d76b87fd" (UID: "d2c2f732-4572-4064-9379-b627d76b87fd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.405631 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-config" (OuterVolumeSpecName: "config") pod "d2c2f732-4572-4064-9379-b627d76b87fd" (UID: "d2c2f732-4572-4064-9379-b627d76b87fd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.428129 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.428197 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.428210 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.428220 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d2c2f732-4572-4064-9379-b627d76b87fd-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.591208 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f76f-account-create-gzdk8"] Sep 29 17:09:18 crc kubenswrapper[4592]: W0929 17:09:18.598860 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14980c16_3c96_4bdc_a271_86ea80fe1ef3.slice/crio-a010be056a929fae2a8dd113854a5eaab46d3d6359ad3481febd8fb81509c43c WatchSource:0}: Error finding container a010be056a929fae2a8dd113854a5eaab46d3d6359ad3481febd8fb81509c43c: Status 404 returned error can't find the container with id a010be056a929fae2a8dd113854a5eaab46d3d6359ad3481febd8fb81509c43c Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.606302 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.677008 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-bd20-account-create-w6tjm"] Sep 29 17:09:18 crc kubenswrapper[4592]: W0929 17:09:18.682380 4592 manager.go:1169] Failed to 
process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a56497a_4fc4_4b0f_b831_8de37479949b.slice/crio-24a752f5f27b8063905c9d170831b76483a41879ff9195ee63c3fa4602b2fa9c WatchSource:0}: Error finding container 24a752f5f27b8063905c9d170831b76483a41879ff9195ee63c3fa4602b2fa9c: Status 404 returned error can't find the container with id 24a752f5f27b8063905c9d170831b76483a41879ff9195ee63c3fa4602b2fa9c Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.687607 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.741325 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-ccba-account-create-gth5x"] Sep 29 17:09:18 crc kubenswrapper[4592]: I0929 17:09:18.757021 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.082644 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" event={"ID":"d2c2f732-4572-4064-9379-b627d76b87fd","Type":"ContainerDied","Data":"3920c45f47fb2e4281e2caddefd169f443d4aeb8300bb4c3b0a408784e49036d"} Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.083967 4592 scope.go:117] "RemoveContainer" containerID="d87ea940166c0e95100aecb6d84240aa857576b1babda02b23e7d9a60f7e4996" Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.082694 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zwcph" Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.084032 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-rplcj" event={"ID":"94891c4d-d21e-419f-9ca9-ce48bef5b069","Type":"ContainerStarted","Data":"3b4dd8b08baf69bafc9b66ad195e82abf44789bfbf01db4a305cb34a9c947086"} Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.085696 4592 generic.go:334] "Generic (PLEG): container finished" podID="7a56497a-4fc4-4b0f-b831-8de37479949b" containerID="92b54ce49fe66336b284dd3e48e80020a0d9c91bf10639a593aac95f9d26b310" exitCode=0 Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.085743 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-bd20-account-create-w6tjm" event={"ID":"7a56497a-4fc4-4b0f-b831-8de37479949b","Type":"ContainerDied","Data":"92b54ce49fe66336b284dd3e48e80020a0d9c91bf10639a593aac95f9d26b310"} Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.085758 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-bd20-account-create-w6tjm" event={"ID":"7a56497a-4fc4-4b0f-b831-8de37479949b","Type":"ContainerStarted","Data":"24a752f5f27b8063905c9d170831b76483a41879ff9195ee63c3fa4602b2fa9c"} Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.087447 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ccba-account-create-gth5x" event={"ID":"b6264c6a-bee2-432e-b70c-1afad98e3db5","Type":"ContainerStarted","Data":"a4550ed229ae0c5d517b3b9646d8563cfe1886a767827988302647bc1ef6adf7"} Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.087470 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ccba-account-create-gth5x" event={"ID":"b6264c6a-bee2-432e-b70c-1afad98e3db5","Type":"ContainerStarted","Data":"8c98e0b50a12b989af61bb98c202ee1cf5fad2731ae2e603598d67aea3a69d3f"} Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.089240 4592 generic.go:334] "Generic 
(PLEG): container finished" podID="14980c16-3c96-4bdc-a271-86ea80fe1ef3" containerID="1b1d084b2300ab35e59270d0016acb470a7d058323d3727cf06ff278b1c6c197" exitCode=0 Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.089279 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f76f-account-create-gzdk8" event={"ID":"14980c16-3c96-4bdc-a271-86ea80fe1ef3","Type":"ContainerDied","Data":"1b1d084b2300ab35e59270d0016acb470a7d058323d3727cf06ff278b1c6c197"} Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.089298 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f76f-account-create-gzdk8" event={"ID":"14980c16-3c96-4bdc-a271-86ea80fe1ef3","Type":"ContainerStarted","Data":"a010be056a929fae2a8dd113854a5eaab46d3d6359ad3481febd8fb81509c43c"} Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.109380 4592 scope.go:117] "RemoveContainer" containerID="5eb6a4f9f69f14ec0de1df22e2058284986837111f5de7322cfce8f7ba7d3978" Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.112782 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-rplcj" podStartSLOduration=2.191235196 podStartE2EDuration="20.1127654s" podCreationTimestamp="2025-09-29 17:08:59 +0000 UTC" firstStartedPulling="2025-09-29 17:09:00.071458847 +0000 UTC m=+1070.219236538" lastFinishedPulling="2025-09-29 17:09:17.992989061 +0000 UTC m=+1088.140766742" observedRunningTime="2025-09-29 17:09:19.111710112 +0000 UTC m=+1089.259487793" watchObservedRunningTime="2025-09-29 17:09:19.1127654 +0000 UTC m=+1089.260543081" Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.133079 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-ccba-account-create-gth5x" podStartSLOduration=11.133058844 podStartE2EDuration="11.133058844s" podCreationTimestamp="2025-09-29 17:09:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:09:19.127467764 +0000 UTC m=+1089.275245445" watchObservedRunningTime="2025-09-29 17:09:19.133058844 +0000 UTC m=+1089.280836525" Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.154198 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zwcph"] Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.171894 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zwcph"] Sep 29 17:09:19 crc kubenswrapper[4592]: I0929 17:09:19.195861 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2c2f732-4572-4064-9379-b627d76b87fd" path="/var/lib/kubelet/pods/d2c2f732-4572-4064-9379-b627d76b87fd/volumes" Sep 29 17:09:20 crc kubenswrapper[4592]: I0929 17:09:20.101081 4592 generic.go:334] "Generic (PLEG): container finished" podID="b6264c6a-bee2-432e-b70c-1afad98e3db5" containerID="a4550ed229ae0c5d517b3b9646d8563cfe1886a767827988302647bc1ef6adf7" exitCode=0 Sep 29 17:09:20 crc kubenswrapper[4592]: I0929 17:09:20.101934 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ccba-account-create-gth5x" event={"ID":"b6264c6a-bee2-432e-b70c-1afad98e3db5","Type":"ContainerDied","Data":"a4550ed229ae0c5d517b3b9646d8563cfe1886a767827988302647bc1ef6adf7"} Sep 29 17:09:20 crc kubenswrapper[4592]: I0929 17:09:20.451845 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f76f-account-create-gzdk8" Sep 29 17:09:20 crc kubenswrapper[4592]: I0929 17:09:20.453999 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-bd20-account-create-w6tjm" Sep 29 17:09:20 crc kubenswrapper[4592]: I0929 17:09:20.570678 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwztv\" (UniqueName: \"kubernetes.io/projected/7a56497a-4fc4-4b0f-b831-8de37479949b-kube-api-access-qwztv\") pod \"7a56497a-4fc4-4b0f-b831-8de37479949b\" (UID: \"7a56497a-4fc4-4b0f-b831-8de37479949b\") " Sep 29 17:09:20 crc kubenswrapper[4592]: I0929 17:09:20.570886 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2xw9\" (UniqueName: \"kubernetes.io/projected/14980c16-3c96-4bdc-a271-86ea80fe1ef3-kube-api-access-h2xw9\") pod \"14980c16-3c96-4bdc-a271-86ea80fe1ef3\" (UID: \"14980c16-3c96-4bdc-a271-86ea80fe1ef3\") " Sep 29 17:09:20 crc kubenswrapper[4592]: I0929 17:09:20.576109 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a56497a-4fc4-4b0f-b831-8de37479949b-kube-api-access-qwztv" (OuterVolumeSpecName: "kube-api-access-qwztv") pod "7a56497a-4fc4-4b0f-b831-8de37479949b" (UID: "7a56497a-4fc4-4b0f-b831-8de37479949b"). InnerVolumeSpecName "kube-api-access-qwztv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:09:20 crc kubenswrapper[4592]: I0929 17:09:20.576327 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14980c16-3c96-4bdc-a271-86ea80fe1ef3-kube-api-access-h2xw9" (OuterVolumeSpecName: "kube-api-access-h2xw9") pod "14980c16-3c96-4bdc-a271-86ea80fe1ef3" (UID: "14980c16-3c96-4bdc-a271-86ea80fe1ef3"). InnerVolumeSpecName "kube-api-access-h2xw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:09:20 crc kubenswrapper[4592]: I0929 17:09:20.672378 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2xw9\" (UniqueName: \"kubernetes.io/projected/14980c16-3c96-4bdc-a271-86ea80fe1ef3-kube-api-access-h2xw9\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:20 crc kubenswrapper[4592]: I0929 17:09:20.672554 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwztv\" (UniqueName: \"kubernetes.io/projected/7a56497a-4fc4-4b0f-b831-8de37479949b-kube-api-access-qwztv\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:21 crc kubenswrapper[4592]: I0929 17:09:21.110803 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-bd20-account-create-w6tjm" event={"ID":"7a56497a-4fc4-4b0f-b831-8de37479949b","Type":"ContainerDied","Data":"24a752f5f27b8063905c9d170831b76483a41879ff9195ee63c3fa4602b2fa9c"} Sep 29 17:09:21 crc kubenswrapper[4592]: I0929 17:09:21.110825 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-bd20-account-create-w6tjm" Sep 29 17:09:21 crc kubenswrapper[4592]: I0929 17:09:21.110849 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24a752f5f27b8063905c9d170831b76483a41879ff9195ee63c3fa4602b2fa9c" Sep 29 17:09:21 crc kubenswrapper[4592]: I0929 17:09:21.112383 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f76f-account-create-gzdk8" event={"ID":"14980c16-3c96-4bdc-a271-86ea80fe1ef3","Type":"ContainerDied","Data":"a010be056a929fae2a8dd113854a5eaab46d3d6359ad3481febd8fb81509c43c"} Sep 29 17:09:21 crc kubenswrapper[4592]: I0929 17:09:21.112400 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f76f-account-create-gzdk8" Sep 29 17:09:21 crc kubenswrapper[4592]: I0929 17:09:21.112412 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a010be056a929fae2a8dd113854a5eaab46d3d6359ad3481febd8fb81509c43c" Sep 29 17:09:21 crc kubenswrapper[4592]: I0929 17:09:21.336075 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-ccba-account-create-gth5x" Sep 29 17:09:21 crc kubenswrapper[4592]: I0929 17:09:21.382538 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2c5vs\" (UniqueName: \"kubernetes.io/projected/b6264c6a-bee2-432e-b70c-1afad98e3db5-kube-api-access-2c5vs\") pod \"b6264c6a-bee2-432e-b70c-1afad98e3db5\" (UID: \"b6264c6a-bee2-432e-b70c-1afad98e3db5\") " Sep 29 17:09:21 crc kubenswrapper[4592]: I0929 17:09:21.388110 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6264c6a-bee2-432e-b70c-1afad98e3db5-kube-api-access-2c5vs" (OuterVolumeSpecName: "kube-api-access-2c5vs") pod "b6264c6a-bee2-432e-b70c-1afad98e3db5" (UID: "b6264c6a-bee2-432e-b70c-1afad98e3db5"). InnerVolumeSpecName "kube-api-access-2c5vs". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:09:21 crc kubenswrapper[4592]: I0929 17:09:21.488672 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2c5vs\" (UniqueName: \"kubernetes.io/projected/b6264c6a-bee2-432e-b70c-1afad98e3db5-kube-api-access-2c5vs\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:22 crc kubenswrapper[4592]: I0929 17:09:22.125067 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-ccba-account-create-gth5x" event={"ID":"b6264c6a-bee2-432e-b70c-1afad98e3db5","Type":"ContainerDied","Data":"8c98e0b50a12b989af61bb98c202ee1cf5fad2731ae2e603598d67aea3a69d3f"} Sep 29 17:09:22 crc kubenswrapper[4592]: I0929 17:09:22.125112 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c98e0b50a12b989af61bb98c202ee1cf5fad2731ae2e603598d67aea3a69d3f" Sep 29 17:09:22 crc kubenswrapper[4592]: I0929 17:09:22.125185 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-ccba-account-create-gth5x" Sep 29 17:09:23 crc kubenswrapper[4592]: I0929 17:09:23.134744 4592 generic.go:334] "Generic (PLEG): container finished" podID="94891c4d-d21e-419f-9ca9-ce48bef5b069" containerID="3b4dd8b08baf69bafc9b66ad195e82abf44789bfbf01db4a305cb34a9c947086" exitCode=0 Sep 29 17:09:23 crc kubenswrapper[4592]: I0929 17:09:23.134932 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-rplcj" event={"ID":"94891c4d-d21e-419f-9ca9-ce48bef5b069","Type":"ContainerDied","Data":"3b4dd8b08baf69bafc9b66ad195e82abf44789bfbf01db4a305cb34a9c947086"} Sep 29 17:09:24 crc kubenswrapper[4592]: I0929 17:09:24.459739 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-rplcj" Sep 29 17:09:24 crc kubenswrapper[4592]: I0929 17:09:24.535123 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94891c4d-d21e-419f-9ca9-ce48bef5b069-config-data\") pod \"94891c4d-d21e-419f-9ca9-ce48bef5b069\" (UID: \"94891c4d-d21e-419f-9ca9-ce48bef5b069\") " Sep 29 17:09:24 crc kubenswrapper[4592]: I0929 17:09:24.535181 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94891c4d-d21e-419f-9ca9-ce48bef5b069-combined-ca-bundle\") pod \"94891c4d-d21e-419f-9ca9-ce48bef5b069\" (UID: \"94891c4d-d21e-419f-9ca9-ce48bef5b069\") " Sep 29 17:09:24 crc kubenswrapper[4592]: I0929 17:09:24.535259 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kmnd7\" (UniqueName: \"kubernetes.io/projected/94891c4d-d21e-419f-9ca9-ce48bef5b069-kube-api-access-kmnd7\") pod \"94891c4d-d21e-419f-9ca9-ce48bef5b069\" (UID: \"94891c4d-d21e-419f-9ca9-ce48bef5b069\") " Sep 29 17:09:24 crc kubenswrapper[4592]: I0929 17:09:24.540047 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94891c4d-d21e-419f-9ca9-ce48bef5b069-kube-api-access-kmnd7" (OuterVolumeSpecName: "kube-api-access-kmnd7") pod "94891c4d-d21e-419f-9ca9-ce48bef5b069" (UID: "94891c4d-d21e-419f-9ca9-ce48bef5b069"). InnerVolumeSpecName "kube-api-access-kmnd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:09:24 crc kubenswrapper[4592]: I0929 17:09:24.579679 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94891c4d-d21e-419f-9ca9-ce48bef5b069-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94891c4d-d21e-419f-9ca9-ce48bef5b069" (UID: "94891c4d-d21e-419f-9ca9-ce48bef5b069"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:09:24 crc kubenswrapper[4592]: I0929 17:09:24.581775 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94891c4d-d21e-419f-9ca9-ce48bef5b069-config-data" (OuterVolumeSpecName: "config-data") pod "94891c4d-d21e-419f-9ca9-ce48bef5b069" (UID: "94891c4d-d21e-419f-9ca9-ce48bef5b069"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:09:24 crc kubenswrapper[4592]: I0929 17:09:24.637186 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94891c4d-d21e-419f-9ca9-ce48bef5b069-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:24 crc kubenswrapper[4592]: I0929 17:09:24.637439 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94891c4d-d21e-419f-9ca9-ce48bef5b069-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:24 crc kubenswrapper[4592]: I0929 17:09:24.637521 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kmnd7\" (UniqueName: \"kubernetes.io/projected/94891c4d-d21e-419f-9ca9-ce48bef5b069-kube-api-access-kmnd7\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.154484 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-rplcj" event={"ID":"94891c4d-d21e-419f-9ca9-ce48bef5b069","Type":"ContainerDied","Data":"e1281e98430acaf3f3598c781c5f69249d015e17d623a0d573cd7fef0b4d3731"} Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.154530 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1281e98430acaf3f3598c781c5f69249d015e17d623a0d573cd7fef0b4d3731" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.154567 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-rplcj" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.420787 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-2nsgs"] Sep 29 17:09:25 crc kubenswrapper[4592]: E0929 17:09:25.421597 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94891c4d-d21e-419f-9ca9-ce48bef5b069" containerName="keystone-db-sync" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.421658 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="94891c4d-d21e-419f-9ca9-ce48bef5b069" containerName="keystone-db-sync" Sep 29 17:09:25 crc kubenswrapper[4592]: E0929 17:09:25.421735 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2c2f732-4572-4064-9379-b627d76b87fd" containerName="init" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.421795 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2c2f732-4572-4064-9379-b627d76b87fd" containerName="init" Sep 29 17:09:25 crc kubenswrapper[4592]: E0929 17:09:25.421853 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14980c16-3c96-4bdc-a271-86ea80fe1ef3" containerName="mariadb-account-create" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.421902 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="14980c16-3c96-4bdc-a271-86ea80fe1ef3" containerName="mariadb-account-create" Sep 29 17:09:25 crc kubenswrapper[4592]: E0929 17:09:25.421956 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a56497a-4fc4-4b0f-b831-8de37479949b" containerName="mariadb-account-create" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.422003 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a56497a-4fc4-4b0f-b831-8de37479949b" containerName="mariadb-account-create" Sep 29 17:09:25 crc kubenswrapper[4592]: E0929 17:09:25.422059 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2c2f732-4572-4064-9379-b627d76b87fd" containerName="dnsmasq-dns" Sep 29 17:09:25 crc 
kubenswrapper[4592]: I0929 17:09:25.422102 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2c2f732-4572-4064-9379-b627d76b87fd" containerName="dnsmasq-dns" Sep 29 17:09:25 crc kubenswrapper[4592]: E0929 17:09:25.422183 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6264c6a-bee2-432e-b70c-1afad98e3db5" containerName="mariadb-account-create" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.422231 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6264c6a-bee2-432e-b70c-1afad98e3db5" containerName="mariadb-account-create" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.422427 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2c2f732-4572-4064-9379-b627d76b87fd" containerName="dnsmasq-dns" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.422486 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="94891c4d-d21e-419f-9ca9-ce48bef5b069" containerName="keystone-db-sync" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.422548 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="14980c16-3c96-4bdc-a271-86ea80fe1ef3" containerName="mariadb-account-create" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.422599 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6264c6a-bee2-432e-b70c-1afad98e3db5" containerName="mariadb-account-create" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.422646 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a56497a-4fc4-4b0f-b831-8de37479949b" containerName="mariadb-account-create" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.423530 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.438241 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-2nsgs"] Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.452420 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-ovsdbserver-sb\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.452620 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-config\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.452730 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-ovsdbserver-nb\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.452805 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-dns-svc\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " 
pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.452973 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcr8j\" (UniqueName: \"kubernetes.io/projected/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-kube-api-access-gcr8j\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.453060 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-dns-swift-storage-0\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.512011 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-m49z6"] Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.513010 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.527005 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.527059 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.527078 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-wm4xb" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.536610 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.557716 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-dns-swift-storage-0\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.557807 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-config\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.557834 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-ovsdbserver-sb\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.557853 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-ovsdbserver-nb\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.557879 4592 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-dns-svc\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.557980 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcr8j\" (UniqueName: \"kubernetes.io/projected/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-kube-api-access-gcr8j\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.559017 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-ovsdbserver-sb\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.559678 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-dns-svc\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.574535 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-config\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.579675 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-ovsdbserver-nb\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.584039 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-dns-swift-storage-0\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.599726 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-m49z6"] Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.648137 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcr8j\" (UniqueName: \"kubernetes.io/projected/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-kube-api-access-gcr8j\") pod \"dnsmasq-dns-6f8c45789f-2nsgs\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.659812 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-scripts\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.659888 4592 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-credential-keys\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.659966 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvqv4\" (UniqueName: \"kubernetes.io/projected/61c45af1-cacf-4a0d-896a-871a93f12c7f-kube-api-access-cvqv4\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.660045 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-combined-ca-bundle\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.660075 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-config-data\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.660103 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-fernet-keys\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.744066 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.762440 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-credential-keys\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.762539 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvqv4\" (UniqueName: \"kubernetes.io/projected/61c45af1-cacf-4a0d-896a-871a93f12c7f-kube-api-access-cvqv4\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.762610 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-combined-ca-bundle\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.762637 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-config-data\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.762662 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-fernet-keys\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.762756 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-scripts\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.771561 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-scripts\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.771858 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-config-data\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.782386 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-credential-keys\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.795547 4592 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-combined-ca-bundle\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.795963 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-fernet-keys\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.850571 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.854313 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvqv4\" (UniqueName: \"kubernetes.io/projected/61c45af1-cacf-4a0d-896a-871a93f12c7f-kube-api-access-cvqv4\") pod \"keystone-bootstrap-m49z6\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.855035 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.873593 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.873796 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.884572 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-85f9c775b9-28ldk"] Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.890546 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.909559 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.925593 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.925920 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-gvc2h" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.926044 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.947502 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.967720 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.967781 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-log-httpd\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.967821 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhwzz\" (UniqueName: \"kubernetes.io/projected/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-kube-api-access-jhwzz\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.967862 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-scripts\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.967900 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.967923 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-config-data\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.967937 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-run-httpd\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.973705 4592 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-zzm6d"] Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.986006 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.996280 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.996813 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 17:09:25 crc kubenswrapper[4592]: I0929 17:09:25.996955 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-zznzl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.061338 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-85f9c775b9-28ldk"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.070872 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-log-httpd\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.070950 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhwzz\" (UniqueName: \"kubernetes.io/projected/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-kube-api-access-jhwzz\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.070996 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/62619408-aeb2-47f3-861c-2820afed3092-horizon-secret-key\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.071038 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-scripts\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.071072 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.071104 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62619408-aeb2-47f3-861c-2820afed3092-logs\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.071124 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62619408-aeb2-47f3-861c-2820afed3092-config-data\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc 
kubenswrapper[4592]: I0929 17:09:26.071229 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-config-data\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.071246 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-run-httpd\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.071280 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfltg\" (UniqueName: \"kubernetes.io/projected/62619408-aeb2-47f3-861c-2820afed3092-kube-api-access-bfltg\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.071312 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.071332 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/62619408-aeb2-47f3-861c-2820afed3092-scripts\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.071823 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-log-httpd\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.087214 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-run-httpd\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.122244 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-zzm6d"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.142390 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-f2q9n"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.143438 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.148235 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-vvbts" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.148745 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.152073 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.152805 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhwzz\" (UniqueName: \"kubernetes.io/projected/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-kube-api-access-jhwzz\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.153159 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.155775 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-f2q9n"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.160742 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-scripts\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.161040 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.161412 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-config-data\") pod \"ceilometer-0\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.172881 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfltg\" (UniqueName: \"kubernetes.io/projected/62619408-aeb2-47f3-861c-2820afed3092-kube-api-access-bfltg\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.172922 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/62619408-aeb2-47f3-861c-2820afed3092-scripts\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.172969 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2f923cac-6659-4bb9-9f5f-8278a4492b35-config\") pod \"neutron-db-sync-zzm6d\" (UID: \"2f923cac-6659-4bb9-9f5f-8278a4492b35\") " pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.173010 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f923cac-6659-4bb9-9f5f-8278a4492b35-combined-ca-bundle\") pod 
\"neutron-db-sync-zzm6d\" (UID: \"2f923cac-6659-4bb9-9f5f-8278a4492b35\") " pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.173032 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/62619408-aeb2-47f3-861c-2820afed3092-horizon-secret-key\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.173074 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62619408-aeb2-47f3-861c-2820afed3092-logs\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.173089 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62619408-aeb2-47f3-861c-2820afed3092-config-data\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.173106 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dkzh\" (UniqueName: \"kubernetes.io/projected/2f923cac-6659-4bb9-9f5f-8278a4492b35-kube-api-access-9dkzh\") pod \"neutron-db-sync-zzm6d\" (UID: \"2f923cac-6659-4bb9-9f5f-8278a4492b35\") " pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.174837 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/62619408-aeb2-47f3-861c-2820afed3092-scripts\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.175606 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62619408-aeb2-47f3-861c-2820afed3092-logs\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.176472 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62619408-aeb2-47f3-861c-2820afed3092-config-data\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.182523 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.189123 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/62619408-aeb2-47f3-861c-2820afed3092-horizon-secret-key\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.198940 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-hgnnh"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.200068 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.207402 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-qvw9b" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.238032 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.254214 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-2nsgs"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.259253 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfltg\" (UniqueName: \"kubernetes.io/projected/62619408-aeb2-47f3-861c-2820afed3092-kube-api-access-bfltg\") pod \"horizon-85f9c775b9-28ldk\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.264547 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-cdd8ff59-mrmdn"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.265844 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.266740 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.274211 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8c80e2b1-f512-432e-87fe-c0ea60e6a546-etc-machine-id\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.274304 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h644t\" (UniqueName: \"kubernetes.io/projected/8c80e2b1-f512-432e-87fe-c0ea60e6a546-kube-api-access-h644t\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.274348 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f923cac-6659-4bb9-9f5f-8278a4492b35-combined-ca-bundle\") pod \"neutron-db-sync-zzm6d\" (UID: \"2f923cac-6659-4bb9-9f5f-8278a4492b35\") " pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.274367 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-config-data\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.274419 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-scripts\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.274455 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-9dkzh\" (UniqueName: \"kubernetes.io/projected/2f923cac-6659-4bb9-9f5f-8278a4492b35-kube-api-access-9dkzh\") pod \"neutron-db-sync-zzm6d\" (UID: \"2f923cac-6659-4bb9-9f5f-8278a4492b35\") " pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.274478 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-db-sync-config-data\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.274496 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-combined-ca-bundle\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.274584 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2f923cac-6659-4bb9-9f5f-8278a4492b35-config\") pod \"neutron-db-sync-zzm6d\" (UID: \"2f923cac-6659-4bb9-9f5f-8278a4492b35\") " pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.276788 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-dvfxl"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.279117 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2f923cac-6659-4bb9-9f5f-8278a4492b35-config\") pod \"neutron-db-sync-zzm6d\" (UID: \"2f923cac-6659-4bb9-9f5f-8278a4492b35\") " pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.281343 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.285213 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f923cac-6659-4bb9-9f5f-8278a4492b35-combined-ca-bundle\") pod \"neutron-db-sync-zzm6d\" (UID: \"2f923cac-6659-4bb9-9f5f-8278a4492b35\") " pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.294124 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-4wqkv" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.294392 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.294512 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.326124 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dkzh\" (UniqueName: \"kubernetes.io/projected/2f923cac-6659-4bb9-9f5f-8278a4492b35-kube-api-access-9dkzh\") pod \"neutron-db-sync-zzm6d\" (UID: \"2f923cac-6659-4bb9-9f5f-8278a4492b35\") " pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.356014 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.367558 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376326 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-config-data\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376368 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-scripts\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376387 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-horizon-secret-key\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376416 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-config-data\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376437 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-scripts\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376465 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-scripts\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376486 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-combined-ca-bundle\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376515 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-db-sync-config-data\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376533 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-combined-ca-bundle\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376554 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4df9236d-2c26-4b89-acfc-d0de121eb93c-db-sync-config-data\") pod \"barbican-db-sync-hgnnh\" (UID: \"4df9236d-2c26-4b89-acfc-d0de121eb93c\") " pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376577 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-logs\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376599 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e678434-33e7-4c70-adff-88140eb9d3af-logs\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376627 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xph5\" (UniqueName: \"kubernetes.io/projected/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-kube-api-access-2xph5\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376645 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnnvs\" (UniqueName: \"kubernetes.io/projected/4df9236d-2c26-4b89-acfc-d0de121eb93c-kube-api-access-cnnvs\") pod \"barbican-db-sync-hgnnh\" (UID: \"4df9236d-2c26-4b89-acfc-d0de121eb93c\") " pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376669 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-config-data\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376706 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8c80e2b1-f512-432e-87fe-c0ea60e6a546-etc-machine-id\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376733 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h644t\" (UniqueName: \"kubernetes.io/projected/8c80e2b1-f512-432e-87fe-c0ea60e6a546-kube-api-access-h644t\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376756 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4df9236d-2c26-4b89-acfc-d0de121eb93c-combined-ca-bundle\") pod \"barbican-db-sync-hgnnh\" (UID: \"4df9236d-2c26-4b89-acfc-d0de121eb93c\") " pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.376769 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nshlx\" (UniqueName: \"kubernetes.io/projected/9e678434-33e7-4c70-adff-88140eb9d3af-kube-api-access-nshlx\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.377580 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8c80e2b1-f512-432e-87fe-c0ea60e6a546-etc-machine-id\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.391371 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-scripts\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.393483 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-config-data\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.393765 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-db-sync-config-data\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.398075 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-jlk7t"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.399738 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-combined-ca-bundle\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.402698 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.406079 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h644t\" (UniqueName: \"kubernetes.io/projected/8c80e2b1-f512-432e-87fe-c0ea60e6a546-kube-api-access-h644t\") pod \"cinder-db-sync-f2q9n\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.417227 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-hgnnh"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.451702 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-cdd8ff59-mrmdn"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.466157 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-dvfxl"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478140 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-dns-svc\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478213 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-scripts\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478249 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-horizon-secret-key\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478282 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-config-data\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478314 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-scripts\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478360 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-combined-ca-bundle\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478387 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-ovsdbserver-nb\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " 
pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478420 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4df9236d-2c26-4b89-acfc-d0de121eb93c-db-sync-config-data\") pod \"barbican-db-sync-hgnnh\" (UID: \"4df9236d-2c26-4b89-acfc-d0de121eb93c\") " pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478439 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-logs\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478463 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e678434-33e7-4c70-adff-88140eb9d3af-logs\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478501 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xph5\" (UniqueName: \"kubernetes.io/projected/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-kube-api-access-2xph5\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478529 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnnvs\" (UniqueName: \"kubernetes.io/projected/4df9236d-2c26-4b89-acfc-d0de121eb93c-kube-api-access-cnnvs\") pod \"barbican-db-sync-hgnnh\" (UID: \"4df9236d-2c26-4b89-acfc-d0de121eb93c\") " pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478559 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-config-data\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478587 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-config\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478604 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-ovsdbserver-sb\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478644 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-dns-swift-storage-0\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: 
I0929 17:09:26.478663 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df9236d-2c26-4b89-acfc-d0de121eb93c-combined-ca-bundle\") pod \"barbican-db-sync-hgnnh\" (UID: \"4df9236d-2c26-4b89-acfc-d0de121eb93c\") " pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.478677 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nshlx\" (UniqueName: \"kubernetes.io/projected/9e678434-33e7-4c70-adff-88140eb9d3af-kube-api-access-nshlx\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.479805 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-scripts\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.479860 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqv85\" (UniqueName: \"kubernetes.io/projected/99bc4bab-6083-4e30-821e-d5eb27378cdb-kube-api-access-jqv85\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.480262 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e678434-33e7-4c70-adff-88140eb9d3af-logs\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.484026 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-config-data\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.485924 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-logs\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.487419 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-jlk7t"] Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.496791 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-scripts\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.497913 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-combined-ca-bundle\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.499941 4592 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-config-data\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.506094 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.525648 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-horizon-secret-key\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.526111 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4df9236d-2c26-4b89-acfc-d0de121eb93c-db-sync-config-data\") pod \"barbican-db-sync-hgnnh\" (UID: \"4df9236d-2c26-4b89-acfc-d0de121eb93c\") " pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.526284 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df9236d-2c26-4b89-acfc-d0de121eb93c-combined-ca-bundle\") pod \"barbican-db-sync-hgnnh\" (UID: \"4df9236d-2c26-4b89-acfc-d0de121eb93c\") " pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.535800 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnnvs\" (UniqueName: \"kubernetes.io/projected/4df9236d-2c26-4b89-acfc-d0de121eb93c-kube-api-access-cnnvs\") pod \"barbican-db-sync-hgnnh\" (UID: \"4df9236d-2c26-4b89-acfc-d0de121eb93c\") " pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.535837 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xph5\" (UniqueName: \"kubernetes.io/projected/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-kube-api-access-2xph5\") pod \"horizon-cdd8ff59-mrmdn\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.559932 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nshlx\" (UniqueName: \"kubernetes.io/projected/9e678434-33e7-4c70-adff-88140eb9d3af-kube-api-access-nshlx\") pod \"placement-db-sync-dvfxl\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.586463 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-config\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.586527 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-ovsdbserver-sb\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 
17:09:26.586621 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-dns-swift-storage-0\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.586677 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqv85\" (UniqueName: \"kubernetes.io/projected/99bc4bab-6083-4e30-821e-d5eb27378cdb-kube-api-access-jqv85\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.586702 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-dns-svc\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.586823 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-ovsdbserver-nb\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.587873 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-ovsdbserver-nb\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.589469 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-dns-svc\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.590229 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-config\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.590347 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-ovsdbserver-sb\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.594832 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.594877 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-dns-swift-storage-0\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.630963 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.641471 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-dvfxl" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.668215 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqv85\" (UniqueName: \"kubernetes.io/projected/99bc4bab-6083-4e30-821e-d5eb27378cdb-kube-api-access-jqv85\") pod \"dnsmasq-dns-fcfdd6f9f-jlk7t\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.751706 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:26 crc kubenswrapper[4592]: I0929 17:09:26.856056 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-2nsgs"] Sep 29 17:09:27 crc kubenswrapper[4592]: I0929 17:09:27.246012 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" event={"ID":"3f0fb1f3-0917-4e11-ae38-77f9f7d83412","Type":"ContainerStarted","Data":"440312704b383dd22cfac2ca9b024bf9a6b70b3f0c02bc45593adf866bf165b8"} Sep 29 17:09:27 crc kubenswrapper[4592]: I0929 17:09:27.262581 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-m49z6"] Sep 29 17:09:27 crc kubenswrapper[4592]: W0929 17:09:27.287960 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61c45af1_cacf_4a0d_896a_871a93f12c7f.slice/crio-c873a395264d0ed243d894279cadbec1a7c3782c9d3966f13f1e2bb9300dbdc8 WatchSource:0}: Error finding container c873a395264d0ed243d894279cadbec1a7c3782c9d3966f13f1e2bb9300dbdc8: Status 404 returned error can't find the container with id c873a395264d0ed243d894279cadbec1a7c3782c9d3966f13f1e2bb9300dbdc8 Sep 29 17:09:27 crc kubenswrapper[4592]: I0929 17:09:27.291535 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-85f9c775b9-28ldk"] Sep 29 17:09:27 crc kubenswrapper[4592]: I0929 17:09:27.494074 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-f2q9n"] Sep 29 17:09:27 crc kubenswrapper[4592]: I0929 17:09:27.517989 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:09:27 crc kubenswrapper[4592]: I0929 17:09:27.561203 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-zzm6d"] Sep 29 17:09:27 crc kubenswrapper[4592]: W0929 17:09:27.584614 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2f923cac_6659_4bb9_9f5f_8278a4492b35.slice/crio-1cbd47271526723936fb8ca1756658925b83fc81f0500d8568c444174d258ed7 WatchSource:0}: Error finding container 
1cbd47271526723936fb8ca1756658925b83fc81f0500d8568c444174d258ed7: Status 404 returned error can't find the container with id 1cbd47271526723936fb8ca1756658925b83fc81f0500d8568c444174d258ed7 Sep 29 17:09:27 crc kubenswrapper[4592]: I0929 17:09:27.600600 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-dvfxl"] Sep 29 17:09:27 crc kubenswrapper[4592]: I0929 17:09:27.621814 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-cdd8ff59-mrmdn"] Sep 29 17:09:27 crc kubenswrapper[4592]: W0929 17:09:27.630447 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e678434_33e7_4c70_adff_88140eb9d3af.slice/crio-9feeac5470c37a0721816878a93e73259598ebb64ce6cbdc1b4469b578ab1757 WatchSource:0}: Error finding container 9feeac5470c37a0721816878a93e73259598ebb64ce6cbdc1b4469b578ab1757: Status 404 returned error can't find the container with id 9feeac5470c37a0721816878a93e73259598ebb64ce6cbdc1b4469b578ab1757 Sep 29 17:09:27 crc kubenswrapper[4592]: I0929 17:09:27.644612 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-hgnnh"] Sep 29 17:09:27 crc kubenswrapper[4592]: I0929 17:09:27.892706 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-jlk7t"] Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.286499 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a","Type":"ContainerStarted","Data":"fef8548e5580e18ce75913ba950be5bd9200bb4538b95a3ed658424b5e4788b8"} Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.294081 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-m49z6" event={"ID":"61c45af1-cacf-4a0d-896a-871a93f12c7f","Type":"ContainerStarted","Data":"4789bd247a0de604482b1c428a991531f2f398fbb3f663d37b9800f4cda91f08"} Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.294134 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-m49z6" event={"ID":"61c45af1-cacf-4a0d-896a-871a93f12c7f","Type":"ContainerStarted","Data":"c873a395264d0ed243d894279cadbec1a7c3782c9d3966f13f1e2bb9300dbdc8"} Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.297699 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-hgnnh" event={"ID":"4df9236d-2c26-4b89-acfc-d0de121eb93c","Type":"ContainerStarted","Data":"2173c2273efa77e137922e163940a6ddcd11784af16c0a7bd4a96a65b0bc7db1"} Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.305852 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-cdd8ff59-mrmdn" event={"ID":"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f","Type":"ContainerStarted","Data":"a2badd3beb79822d7cc2305059e891203d5e1ec88b3bd8c1969e5c5ddfdfb572"} Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.312062 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-f2q9n" event={"ID":"8c80e2b1-f512-432e-87fe-c0ea60e6a546","Type":"ContainerStarted","Data":"e76f98eb69826a4fcc35def68622085245e6d134be8a7cfa2a3e60bc4c7836d5"} Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.316156 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-m49z6" podStartSLOduration=3.31612578 podStartE2EDuration="3.31612578s" podCreationTimestamp="2025-09-29 17:09:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:09:28.312100863 +0000 UTC m=+1098.459878544" watchObservedRunningTime="2025-09-29 17:09:28.31612578 +0000 UTC m=+1098.463903461" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.321388 4592 generic.go:334] "Generic (PLEG): container finished" podID="3f0fb1f3-0917-4e11-ae38-77f9f7d83412" containerID="6a3013622c9eedb51d37ec3113b04fdb6f7521067911e0aff6d30c46443776b7" exitCode=0 Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.321485 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" event={"ID":"3f0fb1f3-0917-4e11-ae38-77f9f7d83412","Type":"ContainerDied","Data":"6a3013622c9eedb51d37ec3113b04fdb6f7521067911e0aff6d30c46443776b7"} Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.326799 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-dvfxl" event={"ID":"9e678434-33e7-4c70-adff-88140eb9d3af","Type":"ContainerStarted","Data":"9feeac5470c37a0721816878a93e73259598ebb64ce6cbdc1b4469b578ab1757"} Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.329508 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zzm6d" event={"ID":"2f923cac-6659-4bb9-9f5f-8278a4492b35","Type":"ContainerStarted","Data":"375c759c41213f449730f5c9f475ac2191282e96fb3fdf7b826e4eec34d50581"} Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.329547 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zzm6d" event={"ID":"2f923cac-6659-4bb9-9f5f-8278a4492b35","Type":"ContainerStarted","Data":"1cbd47271526723936fb8ca1756658925b83fc81f0500d8568c444174d258ed7"} Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.335050 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" event={"ID":"99bc4bab-6083-4e30-821e-d5eb27378cdb","Type":"ContainerStarted","Data":"eca2eaca084cab205d7c81e5775e098384320da49f2e0dfa85ed9e22f90abf22"} Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.346325 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85f9c775b9-28ldk" event={"ID":"62619408-aeb2-47f3-861c-2820afed3092","Type":"ContainerStarted","Data":"5cd4dbed947067264e668bc1c1d9de0a44b6441d79635141a1f57575d6dfeee8"} Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.409599 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-zzm6d" podStartSLOduration=3.409579419 podStartE2EDuration="3.409579419s" podCreationTimestamp="2025-09-29 17:09:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:09:28.397916856 +0000 UTC m=+1098.545694547" watchObservedRunningTime="2025-09-29 17:09:28.409579419 +0000 UTC m=+1098.557357100" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.783166 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.867675 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-dns-svc\") pod \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.867731 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-dns-swift-storage-0\") pod \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.867810 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-ovsdbserver-nb\") pod \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.867861 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcr8j\" (UniqueName: \"kubernetes.io/projected/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-kube-api-access-gcr8j\") pod \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.867947 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-ovsdbserver-sb\") pod \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.867972 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-config\") pod \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\" (UID: \"3f0fb1f3-0917-4e11-ae38-77f9f7d83412\") " Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.880366 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-kube-api-access-gcr8j" (OuterVolumeSpecName: "kube-api-access-gcr8j") pod "3f0fb1f3-0917-4e11-ae38-77f9f7d83412" (UID: "3f0fb1f3-0917-4e11-ae38-77f9f7d83412"). InnerVolumeSpecName "kube-api-access-gcr8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.900960 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3f0fb1f3-0917-4e11-ae38-77f9f7d83412" (UID: "3f0fb1f3-0917-4e11-ae38-77f9f7d83412"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.904528 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3f0fb1f3-0917-4e11-ae38-77f9f7d83412" (UID: "3f0fb1f3-0917-4e11-ae38-77f9f7d83412"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.958730 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-config" (OuterVolumeSpecName: "config") pod "3f0fb1f3-0917-4e11-ae38-77f9f7d83412" (UID: "3f0fb1f3-0917-4e11-ae38-77f9f7d83412"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.959540 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3f0fb1f3-0917-4e11-ae38-77f9f7d83412" (UID: "3f0fb1f3-0917-4e11-ae38-77f9f7d83412"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.966423 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3f0fb1f3-0917-4e11-ae38-77f9f7d83412" (UID: "3f0fb1f3-0917-4e11-ae38-77f9f7d83412"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.972922 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcr8j\" (UniqueName: \"kubernetes.io/projected/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-kube-api-access-gcr8j\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.972965 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.972977 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.972985 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.972994 4592 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:28 crc kubenswrapper[4592]: I0929 17:09:28.973001 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3f0fb1f3-0917-4e11-ae38-77f9f7d83412-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.373331 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" event={"ID":"3f0fb1f3-0917-4e11-ae38-77f9f7d83412","Type":"ContainerDied","Data":"440312704b383dd22cfac2ca9b024bf9a6b70b3f0c02bc45593adf866bf165b8"} Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.373382 4592 scope.go:117] "RemoveContainer" containerID="6a3013622c9eedb51d37ec3113b04fdb6f7521067911e0aff6d30c46443776b7" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.373504 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f8c45789f-2nsgs" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.390525 4592 generic.go:334] "Generic (PLEG): container finished" podID="99bc4bab-6083-4e30-821e-d5eb27378cdb" containerID="7be71c0be23f3d5da498805286731d14b99e7b8cc7459c86773200a29cd2b245" exitCode=0 Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.390643 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" event={"ID":"99bc4bab-6083-4e30-821e-d5eb27378cdb","Type":"ContainerDied","Data":"7be71c0be23f3d5da498805286731d14b99e7b8cc7459c86773200a29cd2b245"} Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.488516 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-2nsgs"] Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.488592 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-2nsgs"] Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.584213 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.595617 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-85f9c775b9-28ldk"] Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.680060 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-67f75cb869-47lbc"] Sep 29 17:09:29 crc kubenswrapper[4592]: E0929 17:09:29.680494 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f0fb1f3-0917-4e11-ae38-77f9f7d83412" containerName="init" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.680512 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f0fb1f3-0917-4e11-ae38-77f9f7d83412" containerName="init" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.680669 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f0fb1f3-0917-4e11-ae38-77f9f7d83412" containerName="init" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.681574 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.721833 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-67f75cb869-47lbc"] Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.822133 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-config-data\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.822480 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr6n5\" (UniqueName: \"kubernetes.io/projected/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-kube-api-access-tr6n5\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.822510 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-logs\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.822542 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-horizon-secret-key\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.822706 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-scripts\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.929928 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-config-data\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.929982 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr6n5\" (UniqueName: \"kubernetes.io/projected/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-kube-api-access-tr6n5\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.930015 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-logs\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.930061 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-horizon-secret-key\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.930375 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-scripts\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.931785 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-config-data\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.930827 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-logs\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.931330 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-scripts\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.948787 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-horizon-secret-key\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:29 crc kubenswrapper[4592]: I0929 17:09:29.971709 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr6n5\" (UniqueName: \"kubernetes.io/projected/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-kube-api-access-tr6n5\") pod \"horizon-67f75cb869-47lbc\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:30 crc kubenswrapper[4592]: I0929 17:09:30.046008 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:09:30 crc kubenswrapper[4592]: I0929 17:09:30.440474 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" event={"ID":"99bc4bab-6083-4e30-821e-d5eb27378cdb","Type":"ContainerStarted","Data":"ee047cabbecd5c7bd27e095d82049950922ffda4d45c206770c911b7a95c93fe"} Sep 29 17:09:30 crc kubenswrapper[4592]: I0929 17:09:30.441858 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:30 crc kubenswrapper[4592]: I0929 17:09:30.465082 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" podStartSLOduration=4.465056957 podStartE2EDuration="4.465056957s" podCreationTimestamp="2025-09-29 17:09:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:09:30.457924568 +0000 UTC m=+1100.605702269" watchObservedRunningTime="2025-09-29 17:09:30.465056957 +0000 UTC m=+1100.612834638" Sep 29 17:09:30 crc kubenswrapper[4592]: I0929 17:09:30.753721 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-67f75cb869-47lbc"] Sep 29 17:09:31 crc kubenswrapper[4592]: I0929 17:09:31.207122 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f0fb1f3-0917-4e11-ae38-77f9f7d83412" path="/var/lib/kubelet/pods/3f0fb1f3-0917-4e11-ae38-77f9f7d83412/volumes" Sep 29 17:09:31 crc kubenswrapper[4592]: I0929 17:09:31.451335 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67f75cb869-47lbc" event={"ID":"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7","Type":"ContainerStarted","Data":"c235dd3f75b88128d018c1865d3c2860e9fa761f0ac07b251dad63533a73c0af"} Sep 29 17:09:35 crc kubenswrapper[4592]: I0929 17:09:35.928311 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-cdd8ff59-mrmdn"] Sep 29 17:09:35 crc kubenswrapper[4592]: I0929 17:09:35.969583 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-c9567f99b-8nh47"] Sep 29 17:09:35 crc kubenswrapper[4592]: I0929 17:09:35.970943 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:35 crc kubenswrapper[4592]: I0929 17:09:35.973624 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.007469 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-c9567f99b-8nh47"] Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.063368 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-67f75cb869-47lbc"] Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.094722 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbc6t\" (UniqueName: \"kubernetes.io/projected/de56880e-c3e2-46db-b63d-c46acd0f6e1f-kube-api-access-tbc6t\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.094793 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-horizon-tls-certs\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.094834 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-combined-ca-bundle\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.094876 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-horizon-secret-key\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.095333 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de56880e-c3e2-46db-b63d-c46acd0f6e1f-scripts\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.095409 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de56880e-c3e2-46db-b63d-c46acd0f6e1f-config-data\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.095432 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de56880e-c3e2-46db-b63d-c46acd0f6e1f-logs\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.144605 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-749bb4c784-lnncs"] Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.150320 4592 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.197251 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de56880e-c3e2-46db-b63d-c46acd0f6e1f-scripts\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.197300 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de56880e-c3e2-46db-b63d-c46acd0f6e1f-config-data\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.197294 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-749bb4c784-lnncs"] Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.197321 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de56880e-c3e2-46db-b63d-c46acd0f6e1f-logs\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.197490 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbc6t\" (UniqueName: \"kubernetes.io/projected/de56880e-c3e2-46db-b63d-c46acd0f6e1f-kube-api-access-tbc6t\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.197575 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-horizon-tls-certs\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.197644 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-combined-ca-bundle\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.197705 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-horizon-secret-key\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.197713 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de56880e-c3e2-46db-b63d-c46acd0f6e1f-logs\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.199465 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de56880e-c3e2-46db-b63d-c46acd0f6e1f-scripts\") pod \"horizon-c9567f99b-8nh47\" (UID: 
\"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.200444 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de56880e-c3e2-46db-b63d-c46acd0f6e1f-config-data\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.207877 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-horizon-tls-certs\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.216135 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-combined-ca-bundle\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.220042 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-horizon-secret-key\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.247311 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbc6t\" (UniqueName: \"kubernetes.io/projected/de56880e-c3e2-46db-b63d-c46acd0f6e1f-kube-api-access-tbc6t\") pod \"horizon-c9567f99b-8nh47\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.298747 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-horizon-secret-key\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.298817 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-combined-ca-bundle\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.298846 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-horizon-tls-certs\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.298885 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkh92\" (UniqueName: \"kubernetes.io/projected/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-kube-api-access-vkh92\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " 
pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.298908 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-config-data\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.298998 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-scripts\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.299023 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-logs\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.311394 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.400181 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-scripts\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.400245 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-logs\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.400292 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-horizon-secret-key\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.400335 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-combined-ca-bundle\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.400828 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-scripts\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.401250 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-horizon-tls-certs\") pod \"horizon-749bb4c784-lnncs\" (UID: 
\"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.401391 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkh92\" (UniqueName: \"kubernetes.io/projected/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-kube-api-access-vkh92\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.401431 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-config-data\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.402811 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-logs\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.402893 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-config-data\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.417438 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-horizon-secret-key\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.419881 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-combined-ca-bundle\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.420870 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-horizon-tls-certs\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.423945 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkh92\" (UniqueName: \"kubernetes.io/projected/2d536771-b1ae-4daf-a9f1-1a86e2af88e8-kube-api-access-vkh92\") pod \"horizon-749bb4c784-lnncs\" (UID: \"2d536771-b1ae-4daf-a9f1-1a86e2af88e8\") " pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.480178 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.538396 4592 generic.go:334] "Generic (PLEG): container finished" podID="61c45af1-cacf-4a0d-896a-871a93f12c7f" containerID="4789bd247a0de604482b1c428a991531f2f398fbb3f663d37b9800f4cda91f08" exitCode=0 Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.538472 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-m49z6" event={"ID":"61c45af1-cacf-4a0d-896a-871a93f12c7f","Type":"ContainerDied","Data":"4789bd247a0de604482b1c428a991531f2f398fbb3f663d37b9800f4cda91f08"} Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.754366 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.827508 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-n87jh"] Sep 29 17:09:36 crc kubenswrapper[4592]: I0929 17:09:36.827732 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" podUID="ac859b1e-0413-4d50-ae61-ddc342af6877" containerName="dnsmasq-dns" containerID="cri-o://0d29eeaedce4647b7557c90e28ef940daf142fb9d1c2e55bc2c8f1cd3d8f1d97" gracePeriod=10 Sep 29 17:09:37 crc kubenswrapper[4592]: I0929 17:09:37.550402 4592 generic.go:334] "Generic (PLEG): container finished" podID="ac859b1e-0413-4d50-ae61-ddc342af6877" containerID="0d29eeaedce4647b7557c90e28ef940daf142fb9d1c2e55bc2c8f1cd3d8f1d97" exitCode=0 Sep 29 17:09:37 crc kubenswrapper[4592]: I0929 17:09:37.550600 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" event={"ID":"ac859b1e-0413-4d50-ae61-ddc342af6877","Type":"ContainerDied","Data":"0d29eeaedce4647b7557c90e28ef940daf142fb9d1c2e55bc2c8f1cd3d8f1d97"} Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.290088 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.460891 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvqv4\" (UniqueName: \"kubernetes.io/projected/61c45af1-cacf-4a0d-896a-871a93f12c7f-kube-api-access-cvqv4\") pod \"61c45af1-cacf-4a0d-896a-871a93f12c7f\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.461375 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-fernet-keys\") pod \"61c45af1-cacf-4a0d-896a-871a93f12c7f\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.461405 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-config-data\") pod \"61c45af1-cacf-4a0d-896a-871a93f12c7f\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.461544 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-scripts\") pod \"61c45af1-cacf-4a0d-896a-871a93f12c7f\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.461611 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-combined-ca-bundle\") pod \"61c45af1-cacf-4a0d-896a-871a93f12c7f\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.461664 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-credential-keys\") pod \"61c45af1-cacf-4a0d-896a-871a93f12c7f\" (UID: \"61c45af1-cacf-4a0d-896a-871a93f12c7f\") " Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.467338 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "61c45af1-cacf-4a0d-896a-871a93f12c7f" (UID: "61c45af1-cacf-4a0d-896a-871a93f12c7f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.472006 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "61c45af1-cacf-4a0d-896a-871a93f12c7f" (UID: "61c45af1-cacf-4a0d-896a-871a93f12c7f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.475433 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-scripts" (OuterVolumeSpecName: "scripts") pod "61c45af1-cacf-4a0d-896a-871a93f12c7f" (UID: "61c45af1-cacf-4a0d-896a-871a93f12c7f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.484517 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61c45af1-cacf-4a0d-896a-871a93f12c7f-kube-api-access-cvqv4" (OuterVolumeSpecName: "kube-api-access-cvqv4") pod "61c45af1-cacf-4a0d-896a-871a93f12c7f" (UID: "61c45af1-cacf-4a0d-896a-871a93f12c7f"). InnerVolumeSpecName "kube-api-access-cvqv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.497476 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "61c45af1-cacf-4a0d-896a-871a93f12c7f" (UID: "61c45af1-cacf-4a0d-896a-871a93f12c7f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.516559 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-config-data" (OuterVolumeSpecName: "config-data") pod "61c45af1-cacf-4a0d-896a-871a93f12c7f" (UID: "61c45af1-cacf-4a0d-896a-871a93f12c7f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.564579 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvqv4\" (UniqueName: \"kubernetes.io/projected/61c45af1-cacf-4a0d-896a-871a93f12c7f-kube-api-access-cvqv4\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.564603 4592 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.564612 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.564620 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.564629 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.564636 4592 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/61c45af1-cacf-4a0d-896a-871a93f12c7f-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.564764 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-m49z6" event={"ID":"61c45af1-cacf-4a0d-896a-871a93f12c7f","Type":"ContainerDied","Data":"c873a395264d0ed243d894279cadbec1a7c3782c9d3966f13f1e2bb9300dbdc8"} Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.564788 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c873a395264d0ed243d894279cadbec1a7c3782c9d3966f13f1e2bb9300dbdc8" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 
17:09:38.564864 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-m49z6" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.696847 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-m49z6"] Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.703291 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-m49z6"] Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.808216 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-lgsdp"] Sep 29 17:09:38 crc kubenswrapper[4592]: E0929 17:09:38.808645 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61c45af1-cacf-4a0d-896a-871a93f12c7f" containerName="keystone-bootstrap" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.808668 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="61c45af1-cacf-4a0d-896a-871a93f12c7f" containerName="keystone-bootstrap" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.808905 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="61c45af1-cacf-4a0d-896a-871a93f12c7f" containerName="keystone-bootstrap" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.809734 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.815766 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.815989 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.816168 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-wm4xb" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.818800 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.823658 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lgsdp"] Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.971685 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmb5r\" (UniqueName: \"kubernetes.io/projected/c518ef27-73ac-4f0b-a896-44284fd049c5-kube-api-access-zmb5r\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.971747 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-credential-keys\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.971782 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-combined-ca-bundle\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.971804 4592 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-scripts\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.971831 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-config-data\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:38 crc kubenswrapper[4592]: I0929 17:09:38.971941 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-fernet-keys\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.073857 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-fernet-keys\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.073932 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmb5r\" (UniqueName: \"kubernetes.io/projected/c518ef27-73ac-4f0b-a896-44284fd049c5-kube-api-access-zmb5r\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.073967 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-credential-keys\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.073989 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-combined-ca-bundle\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.074006 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-scripts\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.074039 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-config-data\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.080676 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-credential-keys\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.081199 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-fernet-keys\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.084342 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-combined-ca-bundle\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.084970 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-config-data\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.093342 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmb5r\" (UniqueName: \"kubernetes.io/projected/c518ef27-73ac-4f0b-a896-44284fd049c5-kube-api-access-zmb5r\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.106993 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-scripts\") pod \"keystone-bootstrap-lgsdp\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.133872 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:09:39 crc kubenswrapper[4592]: I0929 17:09:39.195870 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61c45af1-cacf-4a0d-896a-871a93f12c7f" path="/var/lib/kubelet/pods/61c45af1-cacf-4a0d-896a-871a93f12c7f/volumes" Sep 29 17:09:43 crc kubenswrapper[4592]: I0929 17:09:43.323792 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" podUID="ac859b1e-0413-4d50-ae61-ddc342af6877" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: i/o timeout" Sep 29 17:09:44 crc kubenswrapper[4592]: E0929 17:09:44.728673 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 29 17:09:44 crc kubenswrapper[4592]: E0929 17:09:44.728856 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n66hd8h648h588h5b6h55ch676h5cbh577h68fh678h64bh66bh5fbh55bh576h554hf7h5d4h657h74h7ch649h684hfbh5cfh9ch5d5h58fh549h5b9h596q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2xph5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-cdd8ff59-mrmdn_openstack(d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:09:44 crc kubenswrapper[4592]: E0929 17:09:44.747372 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" 
pod="openstack/horizon-cdd8ff59-mrmdn" podUID="d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f" Sep 29 17:09:44 crc kubenswrapper[4592]: I0929 17:09:44.861670 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:09:44 crc kubenswrapper[4592]: I0929 17:09:44.902760 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-ovsdbserver-nb\") pod \"ac859b1e-0413-4d50-ae61-ddc342af6877\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " Sep 29 17:09:44 crc kubenswrapper[4592]: I0929 17:09:44.903202 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-dns-svc\") pod \"ac859b1e-0413-4d50-ae61-ddc342af6877\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " Sep 29 17:09:44 crc kubenswrapper[4592]: I0929 17:09:44.903382 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-ovsdbserver-sb\") pod \"ac859b1e-0413-4d50-ae61-ddc342af6877\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " Sep 29 17:09:44 crc kubenswrapper[4592]: I0929 17:09:44.903531 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-config\") pod \"ac859b1e-0413-4d50-ae61-ddc342af6877\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " Sep 29 17:09:44 crc kubenswrapper[4592]: I0929 17:09:44.903752 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kphg\" (UniqueName: \"kubernetes.io/projected/ac859b1e-0413-4d50-ae61-ddc342af6877-kube-api-access-6kphg\") pod \"ac859b1e-0413-4d50-ae61-ddc342af6877\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " Sep 29 17:09:44 crc kubenswrapper[4592]: I0929 17:09:44.904980 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-dns-swift-storage-0\") pod \"ac859b1e-0413-4d50-ae61-ddc342af6877\" (UID: \"ac859b1e-0413-4d50-ae61-ddc342af6877\") " Sep 29 17:09:44 crc kubenswrapper[4592]: I0929 17:09:44.919032 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac859b1e-0413-4d50-ae61-ddc342af6877-kube-api-access-6kphg" (OuterVolumeSpecName: "kube-api-access-6kphg") pod "ac859b1e-0413-4d50-ae61-ddc342af6877" (UID: "ac859b1e-0413-4d50-ae61-ddc342af6877"). InnerVolumeSpecName "kube-api-access-6kphg". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:09:44 crc kubenswrapper[4592]: I0929 17:09:44.966369 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ac859b1e-0413-4d50-ae61-ddc342af6877" (UID: "ac859b1e-0413-4d50-ae61-ddc342af6877"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:44 crc kubenswrapper[4592]: I0929 17:09:44.983925 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ac859b1e-0413-4d50-ae61-ddc342af6877" (UID: "ac859b1e-0413-4d50-ae61-ddc342af6877"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:44 crc kubenswrapper[4592]: I0929 17:09:44.993691 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ac859b1e-0413-4d50-ae61-ddc342af6877" (UID: "ac859b1e-0413-4d50-ae61-ddc342af6877"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:44 crc kubenswrapper[4592]: I0929 17:09:44.998928 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-config" (OuterVolumeSpecName: "config") pod "ac859b1e-0413-4d50-ae61-ddc342af6877" (UID: "ac859b1e-0413-4d50-ae61-ddc342af6877"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:45 crc kubenswrapper[4592]: I0929 17:09:45.007773 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:45 crc kubenswrapper[4592]: I0929 17:09:45.007807 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:45 crc kubenswrapper[4592]: I0929 17:09:45.007821 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:45 crc kubenswrapper[4592]: I0929 17:09:45.007834 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kphg\" (UniqueName: \"kubernetes.io/projected/ac859b1e-0413-4d50-ae61-ddc342af6877-kube-api-access-6kphg\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:45 crc kubenswrapper[4592]: I0929 17:09:45.007845 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:45 crc kubenswrapper[4592]: I0929 17:09:45.015179 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ac859b1e-0413-4d50-ae61-ddc342af6877" (UID: "ac859b1e-0413-4d50-ae61-ddc342af6877"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:09:45 crc kubenswrapper[4592]: I0929 17:09:45.110662 4592 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ac859b1e-0413-4d50-ae61-ddc342af6877-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:09:45 crc kubenswrapper[4592]: I0929 17:09:45.651630 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" Sep 29 17:09:45 crc kubenswrapper[4592]: I0929 17:09:45.651639 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" event={"ID":"ac859b1e-0413-4d50-ae61-ddc342af6877","Type":"ContainerDied","Data":"e4ddb2739270f099679bf687e231ffd29b2b7527eed009750f9ba2f2fedcdefc"} Sep 29 17:09:45 crc kubenswrapper[4592]: I0929 17:09:45.651729 4592 scope.go:117] "RemoveContainer" containerID="0d29eeaedce4647b7557c90e28ef940daf142fb9d1c2e55bc2c8f1cd3d8f1d97" Sep 29 17:09:45 crc kubenswrapper[4592]: I0929 17:09:45.706592 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-n87jh"] Sep 29 17:09:45 crc kubenswrapper[4592]: I0929 17:09:45.715712 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-n87jh"] Sep 29 17:09:46 crc kubenswrapper[4592]: E0929 17:09:46.949341 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 29 17:09:46 crc kubenswrapper[4592]: E0929 17:09:46.950022 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5ddh54fh655h54bhb7h5b5h5fbh8dhbdh77h6dh55ch55h5fbh5f8h54dh65fh5c8h5b5h555hfdh9ch57dh574h5cchf5h67hd8h5fh657h595h56fq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bfltg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-85f9c775b9-28ldk_openstack(62619408-aeb2-47f3-861c-2820afed3092): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:09:46 crc kubenswrapper[4592]: E0929 17:09:46.955697 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with 
ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-85f9c775b9-28ldk" podUID="62619408-aeb2-47f3-861c-2820afed3092" Sep 29 17:09:46 crc kubenswrapper[4592]: E0929 17:09:46.961047 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Sep 29 17:09:46 crc kubenswrapper[4592]: E0929 17:09:46.961218 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n59h546h86h578h548hc8h5dch5d5h566h58ch7h698h5bfh87hdh677h7h5b7h5bh5ch5b7hd4h684h56bhc5h68ch7fh5d8hcdhcfh5c6h79q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tr6n5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-67f75cb869-47lbc_openstack(0a73cf15-6064-4d91-a3cb-801ffcc7cdc7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:09:46 crc kubenswrapper[4592]: E0929 17:09:46.963672 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-67f75cb869-47lbc" podUID="0a73cf15-6064-4d91-a3cb-801ffcc7cdc7" Sep 29 17:09:47 crc kubenswrapper[4592]: I0929 17:09:47.192102 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac859b1e-0413-4d50-ae61-ddc342af6877" 
path="/var/lib/kubelet/pods/ac859b1e-0413-4d50-ae61-ddc342af6877/volumes" Sep 29 17:09:48 crc kubenswrapper[4592]: I0929 17:09:48.325624 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6d5b6d6b67-n87jh" podUID="ac859b1e-0413-4d50-ae61-ddc342af6877" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.126:5353: i/o timeout" Sep 29 17:09:57 crc kubenswrapper[4592]: I0929 17:09:57.756340 4592 generic.go:334] "Generic (PLEG): container finished" podID="2f923cac-6659-4bb9-9f5f-8278a4492b35" containerID="375c759c41213f449730f5c9f475ac2191282e96fb3fdf7b826e4eec34d50581" exitCode=0 Sep 29 17:09:57 crc kubenswrapper[4592]: I0929 17:09:57.756506 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zzm6d" event={"ID":"2f923cac-6659-4bb9-9f5f-8278a4492b35","Type":"ContainerDied","Data":"375c759c41213f449730f5c9f475ac2191282e96fb3fdf7b826e4eec34d50581"} Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.148494 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.310736 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-scripts\") pod \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.310789 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-horizon-secret-key\") pod \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.310822 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-logs\") pod \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.310863 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xph5\" (UniqueName: \"kubernetes.io/projected/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-kube-api-access-2xph5\") pod \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.310991 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-config-data\") pod \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\" (UID: \"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f\") " Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.311282 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-logs" (OuterVolumeSpecName: "logs") pod "d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f" (UID: "d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.311817 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.312611 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-scripts" (OuterVolumeSpecName: "scripts") pod "d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f" (UID: "d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.312684 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-config-data" (OuterVolumeSpecName: "config-data") pod "d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f" (UID: "d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.318769 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f" (UID: "d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.319172 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-kube-api-access-2xph5" (OuterVolumeSpecName: "kube-api-access-2xph5") pod "d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f" (UID: "d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f"). InnerVolumeSpecName "kube-api-access-2xph5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.413699 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.413767 4592 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.413787 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xph5\" (UniqueName: \"kubernetes.io/projected/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-kube-api-access-2xph5\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.413800 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.784250 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-cdd8ff59-mrmdn" event={"ID":"d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f","Type":"ContainerDied","Data":"a2badd3beb79822d7cc2305059e891203d5e1ec88b3bd8c1969e5c5ddfdfb572"} Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.784308 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-cdd8ff59-mrmdn" Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.860997 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-cdd8ff59-mrmdn"] Sep 29 17:10:00 crc kubenswrapper[4592]: I0929 17:10:00.869052 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-cdd8ff59-mrmdn"] Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.177092 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.177833 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.197040 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f" path="/var/lib/kubelet/pods/d50b4cc3-a396-4c58-a6c8-baf3d6c2bc3f/volumes" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.328593 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfltg\" (UniqueName: \"kubernetes.io/projected/62619408-aeb2-47f3-861c-2820afed3092-kube-api-access-bfltg\") pod \"62619408-aeb2-47f3-861c-2820afed3092\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.328688 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-config-data\") pod \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.328715 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62619408-aeb2-47f3-861c-2820afed3092-logs\") pod \"62619408-aeb2-47f3-861c-2820afed3092\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.328793 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/62619408-aeb2-47f3-861c-2820afed3092-scripts\") pod \"62619408-aeb2-47f3-861c-2820afed3092\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.328821 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-logs\") pod \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.328857 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tr6n5\" (UniqueName: \"kubernetes.io/projected/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-kube-api-access-tr6n5\") pod \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.328902 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-horizon-secret-key\") pod \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.328932 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62619408-aeb2-47f3-861c-2820afed3092-config-data\") pod \"62619408-aeb2-47f3-861c-2820afed3092\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.328973 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-scripts\") pod \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\" (UID: \"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7\") " Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.329074 4592 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/62619408-aeb2-47f3-861c-2820afed3092-horizon-secret-key\") pod \"62619408-aeb2-47f3-861c-2820afed3092\" (UID: \"62619408-aeb2-47f3-861c-2820afed3092\") " Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.329140 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62619408-aeb2-47f3-861c-2820afed3092-logs" (OuterVolumeSpecName: "logs") pod "62619408-aeb2-47f3-861c-2820afed3092" (UID: "62619408-aeb2-47f3-861c-2820afed3092"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.329352 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-config-data" (OuterVolumeSpecName: "config-data") pod "0a73cf15-6064-4d91-a3cb-801ffcc7cdc7" (UID: "0a73cf15-6064-4d91-a3cb-801ffcc7cdc7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.329577 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.329596 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/62619408-aeb2-47f3-861c-2820afed3092-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.329733 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62619408-aeb2-47f3-861c-2820afed3092-config-data" (OuterVolumeSpecName: "config-data") pod "62619408-aeb2-47f3-861c-2820afed3092" (UID: "62619408-aeb2-47f3-861c-2820afed3092"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.330241 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62619408-aeb2-47f3-861c-2820afed3092-scripts" (OuterVolumeSpecName: "scripts") pod "62619408-aeb2-47f3-861c-2820afed3092" (UID: "62619408-aeb2-47f3-861c-2820afed3092"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.330350 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-logs" (OuterVolumeSpecName: "logs") pod "0a73cf15-6064-4d91-a3cb-801ffcc7cdc7" (UID: "0a73cf15-6064-4d91-a3cb-801ffcc7cdc7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.330477 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-scripts" (OuterVolumeSpecName: "scripts") pod "0a73cf15-6064-4d91-a3cb-801ffcc7cdc7" (UID: "0a73cf15-6064-4d91-a3cb-801ffcc7cdc7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.342552 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "0a73cf15-6064-4d91-a3cb-801ffcc7cdc7" (UID: "0a73cf15-6064-4d91-a3cb-801ffcc7cdc7"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.342600 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62619408-aeb2-47f3-861c-2820afed3092-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "62619408-aeb2-47f3-861c-2820afed3092" (UID: "62619408-aeb2-47f3-861c-2820afed3092"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.342673 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-kube-api-access-tr6n5" (OuterVolumeSpecName: "kube-api-access-tr6n5") pod "0a73cf15-6064-4d91-a3cb-801ffcc7cdc7" (UID: "0a73cf15-6064-4d91-a3cb-801ffcc7cdc7"). InnerVolumeSpecName "kube-api-access-tr6n5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.342655 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62619408-aeb2-47f3-861c-2820afed3092-kube-api-access-bfltg" (OuterVolumeSpecName: "kube-api-access-bfltg") pod "62619408-aeb2-47f3-861c-2820afed3092" (UID: "62619408-aeb2-47f3-861c-2820afed3092"). InnerVolumeSpecName "kube-api-access-bfltg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.431708 4592 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/62619408-aeb2-47f3-861c-2820afed3092-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.431820 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfltg\" (UniqueName: \"kubernetes.io/projected/62619408-aeb2-47f3-861c-2820afed3092-kube-api-access-bfltg\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.431867 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/62619408-aeb2-47f3-861c-2820afed3092-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.431880 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.431893 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tr6n5\" (UniqueName: \"kubernetes.io/projected/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-kube-api-access-tr6n5\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.431906 4592 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.431919 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62619408-aeb2-47f3-861c-2820afed3092-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.431929 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.793348 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-67f75cb869-47lbc" event={"ID":"0a73cf15-6064-4d91-a3cb-801ffcc7cdc7","Type":"ContainerDied","Data":"c235dd3f75b88128d018c1865d3c2860e9fa761f0ac07b251dad63533a73c0af"} Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.793365 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-67f75cb869-47lbc" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.795096 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-85f9c775b9-28ldk" event={"ID":"62619408-aeb2-47f3-861c-2820afed3092","Type":"ContainerDied","Data":"5cd4dbed947067264e668bc1c1d9de0a44b6441d79635141a1f57575d6dfeee8"} Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.795188 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-85f9c775b9-28ldk" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.854267 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-67f75cb869-47lbc"] Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.869955 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-67f75cb869-47lbc"] Sep 29 17:10:01 crc kubenswrapper[4592]: E0929 17:10:01.879745 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Sep 29 17:10:01 crc kubenswrapper[4592]: E0929 17:10:01.879913 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cnnvs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-hgnnh_openstack(4df9236d-2c26-4b89-acfc-d0de121eb93c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:10:01 crc kubenswrapper[4592]: E0929 17:10:01.881448 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-hgnnh" podUID="4df9236d-2c26-4b89-acfc-d0de121eb93c" Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.901039 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-85f9c775b9-28ldk"] Sep 29 17:10:01 crc kubenswrapper[4592]: I0929 17:10:01.907835 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-85f9c775b9-28ldk"] Sep 29 17:10:02 crc kubenswrapper[4592]: E0929 17:10:02.189106 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Sep 29 17:10:02 crc kubenswrapper[4592]: E0929 17:10:02.189320 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n66h7h648h549h9dh655h87h5cfh666h597h7ch546hbbh645h668hdch5dbh5d7h589h665h5b4h5cbh5f7h55bh64dhd5h67ch57h59fh55ch77h59bq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jhwzz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(455d573e-2ab3-4174-9b0d-9deaf6fa0a9a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.272305 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.447328 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dkzh\" (UniqueName: \"kubernetes.io/projected/2f923cac-6659-4bb9-9f5f-8278a4492b35-kube-api-access-9dkzh\") pod \"2f923cac-6659-4bb9-9f5f-8278a4492b35\" (UID: \"2f923cac-6659-4bb9-9f5f-8278a4492b35\") " Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.447861 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2f923cac-6659-4bb9-9f5f-8278a4492b35-config\") pod \"2f923cac-6659-4bb9-9f5f-8278a4492b35\" (UID: \"2f923cac-6659-4bb9-9f5f-8278a4492b35\") " Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.447903 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f923cac-6659-4bb9-9f5f-8278a4492b35-combined-ca-bundle\") pod \"2f923cac-6659-4bb9-9f5f-8278a4492b35\" (UID: \"2f923cac-6659-4bb9-9f5f-8278a4492b35\") " Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.465289 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f923cac-6659-4bb9-9f5f-8278a4492b35-kube-api-access-9dkzh" (OuterVolumeSpecName: "kube-api-access-9dkzh") pod "2f923cac-6659-4bb9-9f5f-8278a4492b35" (UID: "2f923cac-6659-4bb9-9f5f-8278a4492b35"). InnerVolumeSpecName "kube-api-access-9dkzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.475988 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f923cac-6659-4bb9-9f5f-8278a4492b35-config" (OuterVolumeSpecName: "config") pod "2f923cac-6659-4bb9-9f5f-8278a4492b35" (UID: "2f923cac-6659-4bb9-9f5f-8278a4492b35"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.476075 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f923cac-6659-4bb9-9f5f-8278a4492b35-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2f923cac-6659-4bb9-9f5f-8278a4492b35" (UID: "2f923cac-6659-4bb9-9f5f-8278a4492b35"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.551582 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2f923cac-6659-4bb9-9f5f-8278a4492b35-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.551616 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f923cac-6659-4bb9-9f5f-8278a4492b35-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.551627 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dkzh\" (UniqueName: \"kubernetes.io/projected/2f923cac-6659-4bb9-9f5f-8278a4492b35-kube-api-access-9dkzh\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.806734 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-zzm6d" Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.809406 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-zzm6d" event={"ID":"2f923cac-6659-4bb9-9f5f-8278a4492b35","Type":"ContainerDied","Data":"1cbd47271526723936fb8ca1756658925b83fc81f0500d8568c444174d258ed7"} Sep 29 17:10:02 crc kubenswrapper[4592]: I0929 17:10:02.809502 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1cbd47271526723936fb8ca1756658925b83fc81f0500d8568c444174d258ed7" Sep 29 17:10:02 crc kubenswrapper[4592]: E0929 17:10:02.812385 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-hgnnh" podUID="4df9236d-2c26-4b89-acfc-d0de121eb93c" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.192733 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a73cf15-6064-4d91-a3cb-801ffcc7cdc7" path="/var/lib/kubelet/pods/0a73cf15-6064-4d91-a3cb-801ffcc7cdc7/volumes" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.193239 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62619408-aeb2-47f3-861c-2820afed3092" path="/var/lib/kubelet/pods/62619408-aeb2-47f3-861c-2820afed3092/volumes" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.362924 4592 scope.go:117] "RemoveContainer" containerID="0be83540b9bf9ecddfc33e1ab6a5408b79dc91741ab99d8c81d5c95d4512da60" Sep 29 17:10:03 crc kubenswrapper[4592]: E0929 17:10:03.447252 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Sep 29 17:10:03 crc kubenswrapper[4592]: E0929 17:10:03.447678 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h644t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-f2q9n_openstack(8c80e2b1-f512-432e-87fe-c0ea60e6a546): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:10:03 crc kubenswrapper[4592]: E0929 17:10:03.450303 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-f2q9n" podUID="8c80e2b1-f512-432e-87fe-c0ea60e6a546" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.487641 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6664c6795f-th7d5"] Sep 29 17:10:03 crc kubenswrapper[4592]: E0929 17:10:03.488523 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f923cac-6659-4bb9-9f5f-8278a4492b35" containerName="neutron-db-sync" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.488539 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f923cac-6659-4bb9-9f5f-8278a4492b35" containerName="neutron-db-sync" Sep 29 17:10:03 crc kubenswrapper[4592]: E0929 17:10:03.488555 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac859b1e-0413-4d50-ae61-ddc342af6877" containerName="dnsmasq-dns" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 
17:10:03.488560 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac859b1e-0413-4d50-ae61-ddc342af6877" containerName="dnsmasq-dns" Sep 29 17:10:03 crc kubenswrapper[4592]: E0929 17:10:03.502244 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac859b1e-0413-4d50-ae61-ddc342af6877" containerName="init" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.502287 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac859b1e-0413-4d50-ae61-ddc342af6877" containerName="init" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.502994 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f923cac-6659-4bb9-9f5f-8278a4492b35" containerName="neutron-db-sync" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.503012 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac859b1e-0413-4d50-ae61-ddc342af6877" containerName="dnsmasq-dns" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.512907 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.689582 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6664c6795f-th7d5"] Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.695182 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-ovsdbserver-nb\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.695284 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-config\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.695422 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhgnw\" (UniqueName: \"kubernetes.io/projected/e4096f1b-fd5a-4de6-8b4a-019801da00ea-kube-api-access-fhgnw\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.695479 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-ovsdbserver-sb\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.695752 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-dns-svc\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.695789 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-dns-swift-storage-0\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.716397 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6868c5fc6d-w5577"] Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.718562 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.722311 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.722530 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.722356 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-zznzl" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.726202 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.729869 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6868c5fc6d-w5577"] Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.797030 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-dns-svc\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.797086 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-dns-swift-storage-0\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.797114 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-ovsdbserver-nb\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.797159 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-config\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.797210 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-ovndb-tls-certs\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.797238 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhgnw\" (UniqueName: 
\"kubernetes.io/projected/e4096f1b-fd5a-4de6-8b4a-019801da00ea-kube-api-access-fhgnw\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.797260 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-httpd-config\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.797294 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-ovsdbserver-sb\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.797353 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c49c2\" (UniqueName: \"kubernetes.io/projected/a4e5f4d8-95ed-4996-8bd4-44029abd998c-kube-api-access-c49c2\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.797403 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-combined-ca-bundle\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.797438 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-config\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.798459 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-dns-svc\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.799040 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-dns-swift-storage-0\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.802313 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-config\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.803901 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-ovsdbserver-sb\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.804037 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-ovsdbserver-nb\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.826995 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhgnw\" (UniqueName: \"kubernetes.io/projected/e4096f1b-fd5a-4de6-8b4a-019801da00ea-kube-api-access-fhgnw\") pod \"dnsmasq-dns-6664c6795f-th7d5\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.830551 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-dvfxl" event={"ID":"9e678434-33e7-4c70-adff-88140eb9d3af","Type":"ContainerStarted","Data":"d660359deadfeacc0f11b2dd28fdcbb5cf8df3582de8dce24c059af2ce7f563d"} Sep 29 17:10:03 crc kubenswrapper[4592]: E0929 17:10:03.831680 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-f2q9n" podUID="8c80e2b1-f512-432e-87fe-c0ea60e6a546" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.855608 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-dvfxl" podStartSLOduration=3.33703269 podStartE2EDuration="37.855586964s" podCreationTimestamp="2025-09-29 17:09:26 +0000 UTC" firstStartedPulling="2025-09-29 17:09:27.658625665 +0000 UTC m=+1097.806403346" lastFinishedPulling="2025-09-29 17:10:02.177179939 +0000 UTC m=+1132.324957620" observedRunningTime="2025-09-29 17:10:03.850670177 +0000 UTC m=+1133.998447858" watchObservedRunningTime="2025-09-29 17:10:03.855586964 +0000 UTC m=+1134.003364655" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.899417 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-ovndb-tls-certs\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.899477 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-httpd-config\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.899527 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c49c2\" (UniqueName: \"kubernetes.io/projected/a4e5f4d8-95ed-4996-8bd4-44029abd998c-kube-api-access-c49c2\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.899564 4592 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-combined-ca-bundle\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.899602 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-config\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.907103 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-ovndb-tls-certs\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.907654 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-httpd-config\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.911710 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-config\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.912298 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-combined-ca-bundle\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:03 crc kubenswrapper[4592]: I0929 17:10:03.929569 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c49c2\" (UniqueName: \"kubernetes.io/projected/a4e5f4d8-95ed-4996-8bd4-44029abd998c-kube-api-access-c49c2\") pod \"neutron-6868c5fc6d-w5577\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:04 crc kubenswrapper[4592]: I0929 17:10:04.016521 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:04 crc kubenswrapper[4592]: I0929 17:10:04.071680 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:04 crc kubenswrapper[4592]: I0929 17:10:04.139402 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-749bb4c784-lnncs"] Sep 29 17:10:04 crc kubenswrapper[4592]: I0929 17:10:04.196512 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-c9567f99b-8nh47"] Sep 29 17:10:04 crc kubenswrapper[4592]: I0929 17:10:04.274307 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-lgsdp"] Sep 29 17:10:04 crc kubenswrapper[4592]: W0929 17:10:04.298748 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc518ef27_73ac_4f0b_a896_44284fd049c5.slice/crio-eca3e47714cea372fe72a12783c0cebcc52b3990d7dd875ed3ba73aa450a603a WatchSource:0}: Error finding container eca3e47714cea372fe72a12783c0cebcc52b3990d7dd875ed3ba73aa450a603a: Status 404 returned error can't find the container with id eca3e47714cea372fe72a12783c0cebcc52b3990d7dd875ed3ba73aa450a603a Sep 29 17:10:04 crc kubenswrapper[4592]: I0929 17:10:04.744761 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6664c6795f-th7d5"] Sep 29 17:10:04 crc kubenswrapper[4592]: I0929 17:10:04.836830 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6868c5fc6d-w5577"] Sep 29 17:10:04 crc kubenswrapper[4592]: I0929 17:10:04.844119 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c9567f99b-8nh47" event={"ID":"de56880e-c3e2-46db-b63d-c46acd0f6e1f","Type":"ContainerStarted","Data":"e68cf4336428d074cf25556144e91fa928ad9f137e8548b2badb168f6ac6fec8"} Sep 29 17:10:04 crc kubenswrapper[4592]: I0929 17:10:04.859416 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" event={"ID":"e4096f1b-fd5a-4de6-8b4a-019801da00ea","Type":"ContainerStarted","Data":"4a4b84e27f2174e168c07f871974e7af625a2e0512b9f38ef7547ac474a898d2"} Sep 29 17:10:04 crc kubenswrapper[4592]: I0929 17:10:04.861431 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749bb4c784-lnncs" event={"ID":"2d536771-b1ae-4daf-a9f1-1a86e2af88e8","Type":"ContainerStarted","Data":"05901e694a55a7dbcd158c07132315dda35751b17dcdacabf76c841cc0bc82cc"} Sep 29 17:10:04 crc kubenswrapper[4592]: I0929 17:10:04.869444 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lgsdp" event={"ID":"c518ef27-73ac-4f0b-a896-44284fd049c5","Type":"ContainerStarted","Data":"eca3e47714cea372fe72a12783c0cebcc52b3990d7dd875ed3ba73aa450a603a"} Sep 29 17:10:05 crc kubenswrapper[4592]: I0929 17:10:05.887536 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lgsdp" event={"ID":"c518ef27-73ac-4f0b-a896-44284fd049c5","Type":"ContainerStarted","Data":"6e8b2fc8aace48ca49c4f12eb56df304e012ad4cab16de0938de1bc9cf5dc76a"} Sep 29 17:10:05 crc kubenswrapper[4592]: I0929 17:10:05.897835 4592 generic.go:334] "Generic (PLEG): container finished" podID="e4096f1b-fd5a-4de6-8b4a-019801da00ea" containerID="11a2076e079e14665760e16f4e6b44210cc0e1f4cc93f78e583faf096d51a112" exitCode=0 Sep 29 17:10:05 crc kubenswrapper[4592]: I0929 17:10:05.897938 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" event={"ID":"e4096f1b-fd5a-4de6-8b4a-019801da00ea","Type":"ContainerDied","Data":"11a2076e079e14665760e16f4e6b44210cc0e1f4cc93f78e583faf096d51a112"} Sep 29 17:10:05 
crc kubenswrapper[4592]: I0929 17:10:05.906964 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9gcq7" event={"ID":"f760ecfd-a454-4a77-89c3-0703ea63c515","Type":"ContainerStarted","Data":"3d153efe2d442d5cf9d5c2ea7ea15199eb598344fac9ac084fc12f46e2e1847a"} Sep 29 17:10:05 crc kubenswrapper[4592]: I0929 17:10:05.915605 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6868c5fc6d-w5577" event={"ID":"a4e5f4d8-95ed-4996-8bd4-44029abd998c","Type":"ContainerStarted","Data":"cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e"} Sep 29 17:10:05 crc kubenswrapper[4592]: I0929 17:10:05.915664 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6868c5fc6d-w5577" event={"ID":"a4e5f4d8-95ed-4996-8bd4-44029abd998c","Type":"ContainerStarted","Data":"42e9948f7b231c4e87fe86a9f5e1ddcd3fc4116c8f9017af6cd15fc70da940ec"} Sep 29 17:10:05 crc kubenswrapper[4592]: I0929 17:10:05.918562 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-lgsdp" podStartSLOduration=27.918546317 podStartE2EDuration="27.918546317s" podCreationTimestamp="2025-09-29 17:09:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:05.916100618 +0000 UTC m=+1136.063878299" watchObservedRunningTime="2025-09-29 17:10:05.918546317 +0000 UTC m=+1136.066323998" Sep 29 17:10:05 crc kubenswrapper[4592]: I0929 17:10:05.950304 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-9gcq7" podStartSLOduration=4.602342944 podStartE2EDuration="1m8.950284793s" podCreationTimestamp="2025-09-29 17:08:57 +0000 UTC" firstStartedPulling="2025-09-29 17:08:57.829192839 +0000 UTC m=+1067.976970520" lastFinishedPulling="2025-09-29 17:10:02.177134688 +0000 UTC m=+1132.324912369" observedRunningTime="2025-09-29 17:10:05.939516573 +0000 UTC m=+1136.087294254" watchObservedRunningTime="2025-09-29 17:10:05.950284793 +0000 UTC m=+1136.098062474" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.609218 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5588c77f49-qmt48"] Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.611129 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.612764 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.613648 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.619748 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5588c77f49-qmt48"] Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.692494 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-public-tls-certs\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.692636 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-ovndb-tls-certs\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.692678 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-httpd-config\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.692718 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-internal-tls-certs\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.692743 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-combined-ca-bundle\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.692805 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-config\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.692854 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js26j\" (UniqueName: \"kubernetes.io/projected/177b2eb7-9986-4985-bd07-1b5a5d86f678-kube-api-access-js26j\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.794251 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-ovndb-tls-certs\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.794370 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-httpd-config\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.794456 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-internal-tls-certs\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.794503 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-combined-ca-bundle\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.794642 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-config\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.794737 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js26j\" (UniqueName: \"kubernetes.io/projected/177b2eb7-9986-4985-bd07-1b5a5d86f678-kube-api-access-js26j\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.794837 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-public-tls-certs\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.798765 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-ovndb-tls-certs\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.801361 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-public-tls-certs\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.801843 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-httpd-config\") pod \"neutron-5588c77f49-qmt48\" (UID: 
\"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.802308 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-config\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.802712 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-internal-tls-certs\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.808731 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/177b2eb7-9986-4985-bd07-1b5a5d86f678-combined-ca-bundle\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.816810 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js26j\" (UniqueName: \"kubernetes.io/projected/177b2eb7-9986-4985-bd07-1b5a5d86f678-kube-api-access-js26j\") pod \"neutron-5588c77f49-qmt48\" (UID: \"177b2eb7-9986-4985-bd07-1b5a5d86f678\") " pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.925298 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c9567f99b-8nh47" event={"ID":"de56880e-c3e2-46db-b63d-c46acd0f6e1f","Type":"ContainerStarted","Data":"5cfe4146468dedf86aeb20915c49e1cd273dffa7a3e1db160e17e0a28afe71ab"} Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.925609 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c9567f99b-8nh47" event={"ID":"de56880e-c3e2-46db-b63d-c46acd0f6e1f","Type":"ContainerStarted","Data":"c044f7528ceed902d66d79d1f760ee4728348faeae6cb231d66d1c9544900e03"} Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.928459 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" event={"ID":"e4096f1b-fd5a-4de6-8b4a-019801da00ea","Type":"ContainerStarted","Data":"21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba"} Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.928567 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.931120 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6868c5fc6d-w5577" event={"ID":"a4e5f4d8-95ed-4996-8bd4-44029abd998c","Type":"ContainerStarted","Data":"4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640"} Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.931194 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.931950 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.933344 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a","Type":"ContainerStarted","Data":"e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c"} Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.935411 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749bb4c784-lnncs" event={"ID":"2d536771-b1ae-4daf-a9f1-1a86e2af88e8","Type":"ContainerStarted","Data":"5866d8efe686127c7c7b10f8621f579880df1ada3ef7a4bb255d56617124a27d"} Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.935448 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749bb4c784-lnncs" event={"ID":"2d536771-b1ae-4daf-a9f1-1a86e2af88e8","Type":"ContainerStarted","Data":"22b225bc882901718f0af7cd169db2e7ad217a076051ff8f3bfcaf3a7fd6e117"} Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.952968 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-c9567f99b-8nh47" podStartSLOduration=30.743484259 podStartE2EDuration="31.95295005s" podCreationTimestamp="2025-09-29 17:09:35 +0000 UTC" firstStartedPulling="2025-09-29 17:10:04.208597921 +0000 UTC m=+1134.356375602" lastFinishedPulling="2025-09-29 17:10:05.418063712 +0000 UTC m=+1135.565841393" observedRunningTime="2025-09-29 17:10:06.951085238 +0000 UTC m=+1137.098862929" watchObservedRunningTime="2025-09-29 17:10:06.95295005 +0000 UTC m=+1137.100727731" Sep 29 17:10:06 crc kubenswrapper[4592]: I0929 17:10:06.987854 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6868c5fc6d-w5577" podStartSLOduration=3.987835934 podStartE2EDuration="3.987835934s" podCreationTimestamp="2025-09-29 17:10:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:06.986522057 +0000 UTC m=+1137.134299758" watchObservedRunningTime="2025-09-29 17:10:06.987835934 +0000 UTC m=+1137.135613615" Sep 29 17:10:07 crc kubenswrapper[4592]: I0929 17:10:07.016968 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-749bb4c784-lnncs" podStartSLOduration=29.776971745 podStartE2EDuration="31.016949317s" podCreationTimestamp="2025-09-29 17:09:36 +0000 UTC" firstStartedPulling="2025-09-29 17:10:04.180210369 +0000 UTC m=+1134.327988050" lastFinishedPulling="2025-09-29 17:10:05.420187941 +0000 UTC m=+1135.567965622" observedRunningTime="2025-09-29 17:10:07.008354437 +0000 UTC m=+1137.156132118" watchObservedRunningTime="2025-09-29 17:10:07.016949317 +0000 UTC m=+1137.164726998" Sep 29 17:10:07 crc kubenswrapper[4592]: I0929 17:10:07.043028 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" podStartSLOduration=4.042986674 podStartE2EDuration="4.042986674s" podCreationTimestamp="2025-09-29 17:10:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:07.032546503 +0000 UTC m=+1137.180324204" watchObservedRunningTime="2025-09-29 17:10:07.042986674 +0000 UTC m=+1137.190764355" Sep 29 17:10:07 crc kubenswrapper[4592]: I0929 17:10:07.574351 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5588c77f49-qmt48"] Sep 29 17:10:07 crc 
kubenswrapper[4592]: I0929 17:10:07.945463 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5588c77f49-qmt48" event={"ID":"177b2eb7-9986-4985-bd07-1b5a5d86f678","Type":"ContainerStarted","Data":"575f471fe1efd932e530b4701a07f88cf969b7507cb06188cefe044da238b448"} Sep 29 17:10:08 crc kubenswrapper[4592]: I0929 17:10:08.954986 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5588c77f49-qmt48" event={"ID":"177b2eb7-9986-4985-bd07-1b5a5d86f678","Type":"ContainerStarted","Data":"9b408b740ee7a32821ef5535722fa03cea10d2818ed62ca6723d191c88a5c306"} Sep 29 17:10:08 crc kubenswrapper[4592]: I0929 17:10:08.955641 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:08 crc kubenswrapper[4592]: I0929 17:10:08.955657 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5588c77f49-qmt48" event={"ID":"177b2eb7-9986-4985-bd07-1b5a5d86f678","Type":"ContainerStarted","Data":"aad58f6c6e1d1c4721587748017bff63a6a8c53d4fbce97555b6ae0ce4e9fe7a"} Sep 29 17:10:08 crc kubenswrapper[4592]: I0929 17:10:08.987970 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5588c77f49-qmt48" podStartSLOduration=2.987952413 podStartE2EDuration="2.987952413s" podCreationTimestamp="2025-09-29 17:10:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:08.970754613 +0000 UTC m=+1139.118532304" watchObservedRunningTime="2025-09-29 17:10:08.987952413 +0000 UTC m=+1139.135730094" Sep 29 17:10:09 crc kubenswrapper[4592]: I0929 17:10:09.965718 4592 generic.go:334] "Generic (PLEG): container finished" podID="9e678434-33e7-4c70-adff-88140eb9d3af" containerID="d660359deadfeacc0f11b2dd28fdcbb5cf8df3582de8dce24c059af2ce7f563d" exitCode=0 Sep 29 17:10:09 crc kubenswrapper[4592]: I0929 17:10:09.965913 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-dvfxl" event={"ID":"9e678434-33e7-4c70-adff-88140eb9d3af","Type":"ContainerDied","Data":"d660359deadfeacc0f11b2dd28fdcbb5cf8df3582de8dce24c059af2ce7f563d"} Sep 29 17:10:14 crc kubenswrapper[4592]: I0929 17:10:14.005341 4592 generic.go:334] "Generic (PLEG): container finished" podID="c518ef27-73ac-4f0b-a896-44284fd049c5" containerID="6e8b2fc8aace48ca49c4f12eb56df304e012ad4cab16de0938de1bc9cf5dc76a" exitCode=0 Sep 29 17:10:14 crc kubenswrapper[4592]: I0929 17:10:14.005726 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lgsdp" event={"ID":"c518ef27-73ac-4f0b-a896-44284fd049c5","Type":"ContainerDied","Data":"6e8b2fc8aace48ca49c4f12eb56df304e012ad4cab16de0938de1bc9cf5dc76a"} Sep 29 17:10:14 crc kubenswrapper[4592]: I0929 17:10:14.019339 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:14 crc kubenswrapper[4592]: I0929 17:10:14.104042 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-jlk7t"] Sep 29 17:10:14 crc kubenswrapper[4592]: I0929 17:10:14.104340 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" podUID="99bc4bab-6083-4e30-821e-d5eb27378cdb" containerName="dnsmasq-dns" containerID="cri-o://ee047cabbecd5c7bd27e095d82049950922ffda4d45c206770c911b7a95c93fe" gracePeriod=10 Sep 29 17:10:15 crc kubenswrapper[4592]: I0929 17:10:15.025736 4592 
generic.go:334] "Generic (PLEG): container finished" podID="99bc4bab-6083-4e30-821e-d5eb27378cdb" containerID="ee047cabbecd5c7bd27e095d82049950922ffda4d45c206770c911b7a95c93fe" exitCode=0 Sep 29 17:10:15 crc kubenswrapper[4592]: I0929 17:10:15.025829 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" event={"ID":"99bc4bab-6083-4e30-821e-d5eb27378cdb","Type":"ContainerDied","Data":"ee047cabbecd5c7bd27e095d82049950922ffda4d45c206770c911b7a95c93fe"} Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.052225 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-lgsdp" event={"ID":"c518ef27-73ac-4f0b-a896-44284fd049c5","Type":"ContainerDied","Data":"eca3e47714cea372fe72a12783c0cebcc52b3990d7dd875ed3ba73aa450a603a"} Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.052774 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eca3e47714cea372fe72a12783c0cebcc52b3990d7dd875ed3ba73aa450a603a" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.058810 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-dvfxl" event={"ID":"9e678434-33e7-4c70-adff-88140eb9d3af","Type":"ContainerDied","Data":"9feeac5470c37a0721816878a93e73259598ebb64ce6cbdc1b4469b578ab1757"} Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.058978 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9feeac5470c37a0721816878a93e73259598ebb64ce6cbdc1b4469b578ab1757" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.180310 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-dvfxl" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.180975 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.311469 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-credential-keys\") pod \"c518ef27-73ac-4f0b-a896-44284fd049c5\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.311518 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nshlx\" (UniqueName: \"kubernetes.io/projected/9e678434-33e7-4c70-adff-88140eb9d3af-kube-api-access-nshlx\") pod \"9e678434-33e7-4c70-adff-88140eb9d3af\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.311571 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-config-data\") pod \"c518ef27-73ac-4f0b-a896-44284fd049c5\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.311612 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-config-data\") pod \"9e678434-33e7-4c70-adff-88140eb9d3af\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.311667 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-fernet-keys\") pod \"c518ef27-73ac-4f0b-a896-44284fd049c5\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.311706 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e678434-33e7-4c70-adff-88140eb9d3af-logs\") pod \"9e678434-33e7-4c70-adff-88140eb9d3af\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.311778 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmb5r\" (UniqueName: \"kubernetes.io/projected/c518ef27-73ac-4f0b-a896-44284fd049c5-kube-api-access-zmb5r\") pod \"c518ef27-73ac-4f0b-a896-44284fd049c5\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.311839 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-scripts\") pod \"9e678434-33e7-4c70-adff-88140eb9d3af\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.311872 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-combined-ca-bundle\") pod \"9e678434-33e7-4c70-adff-88140eb9d3af\" (UID: \"9e678434-33e7-4c70-adff-88140eb9d3af\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.311890 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-scripts\") pod \"c518ef27-73ac-4f0b-a896-44284fd049c5\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") 
" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.311918 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-combined-ca-bundle\") pod \"c518ef27-73ac-4f0b-a896-44284fd049c5\" (UID: \"c518ef27-73ac-4f0b-a896-44284fd049c5\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.317367 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "c518ef27-73ac-4f0b-a896-44284fd049c5" (UID: "c518ef27-73ac-4f0b-a896-44284fd049c5"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.322848 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-scripts" (OuterVolumeSpecName: "scripts") pod "9e678434-33e7-4c70-adff-88140eb9d3af" (UID: "9e678434-33e7-4c70-adff-88140eb9d3af"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.323074 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.323116 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.324080 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e678434-33e7-4c70-adff-88140eb9d3af-logs" (OuterVolumeSpecName: "logs") pod "9e678434-33e7-4c70-adff-88140eb9d3af" (UID: "9e678434-33e7-4c70-adff-88140eb9d3af"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.326324 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c518ef27-73ac-4f0b-a896-44284fd049c5-kube-api-access-zmb5r" (OuterVolumeSpecName: "kube-api-access-zmb5r") pod "c518ef27-73ac-4f0b-a896-44284fd049c5" (UID: "c518ef27-73ac-4f0b-a896-44284fd049c5"). InnerVolumeSpecName "kube-api-access-zmb5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.326738 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.332600 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "c518ef27-73ac-4f0b-a896-44284fd049c5" (UID: "c518ef27-73ac-4f0b-a896-44284fd049c5"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.332933 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-scripts" (OuterVolumeSpecName: "scripts") pod "c518ef27-73ac-4f0b-a896-44284fd049c5" (UID: "c518ef27-73ac-4f0b-a896-44284fd049c5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.338687 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e678434-33e7-4c70-adff-88140eb9d3af-kube-api-access-nshlx" (OuterVolumeSpecName: "kube-api-access-nshlx") pod "9e678434-33e7-4c70-adff-88140eb9d3af" (UID: "9e678434-33e7-4c70-adff-88140eb9d3af"). InnerVolumeSpecName "kube-api-access-nshlx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.364090 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c518ef27-73ac-4f0b-a896-44284fd049c5" (UID: "c518ef27-73ac-4f0b-a896-44284fd049c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.368512 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.377992 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e678434-33e7-4c70-adff-88140eb9d3af" (UID: "9e678434-33e7-4c70-adff-88140eb9d3af"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.389127 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-config-data" (OuterVolumeSpecName: "config-data") pod "9e678434-33e7-4c70-adff-88140eb9d3af" (UID: "9e678434-33e7-4c70-adff-88140eb9d3af"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.390493 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-config-data" (OuterVolumeSpecName: "config-data") pod "c518ef27-73ac-4f0b-a896-44284fd049c5" (UID: "c518ef27-73ac-4f0b-a896-44284fd049c5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.422252 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmb5r\" (UniqueName: \"kubernetes.io/projected/c518ef27-73ac-4f0b-a896-44284fd049c5-kube-api-access-zmb5r\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.422298 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.422321 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.422336 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.422354 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.422367 4592 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-credential-keys\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.422384 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nshlx\" (UniqueName: \"kubernetes.io/projected/9e678434-33e7-4c70-adff-88140eb9d3af-kube-api-access-nshlx\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.422396 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.422409 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e678434-33e7-4c70-adff-88140eb9d3af-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.422423 4592 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c518ef27-73ac-4f0b-a896-44284fd049c5-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.422439 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e678434-33e7-4c70-adff-88140eb9d3af-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.482807 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.484167 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-749bb4c784-lnncs" podUID="2d536771-b1ae-4daf-a9f1-1a86e2af88e8" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 29 17:10:16 crc 
kubenswrapper[4592]: I0929 17:10:16.484939 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.523233 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-dns-svc\") pod \"99bc4bab-6083-4e30-821e-d5eb27378cdb\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.523365 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-ovsdbserver-nb\") pod \"99bc4bab-6083-4e30-821e-d5eb27378cdb\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.523466 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqv85\" (UniqueName: \"kubernetes.io/projected/99bc4bab-6083-4e30-821e-d5eb27378cdb-kube-api-access-jqv85\") pod \"99bc4bab-6083-4e30-821e-d5eb27378cdb\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.523592 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-dns-swift-storage-0\") pod \"99bc4bab-6083-4e30-821e-d5eb27378cdb\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.523618 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-ovsdbserver-sb\") pod \"99bc4bab-6083-4e30-821e-d5eb27378cdb\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.523652 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-config\") pod \"99bc4bab-6083-4e30-821e-d5eb27378cdb\" (UID: \"99bc4bab-6083-4e30-821e-d5eb27378cdb\") " Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.527173 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99bc4bab-6083-4e30-821e-d5eb27378cdb-kube-api-access-jqv85" (OuterVolumeSpecName: "kube-api-access-jqv85") pod "99bc4bab-6083-4e30-821e-d5eb27378cdb" (UID: "99bc4bab-6083-4e30-821e-d5eb27378cdb"). InnerVolumeSpecName "kube-api-access-jqv85". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.577377 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "99bc4bab-6083-4e30-821e-d5eb27378cdb" (UID: "99bc4bab-6083-4e30-821e-d5eb27378cdb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.587903 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "99bc4bab-6083-4e30-821e-d5eb27378cdb" (UID: "99bc4bab-6083-4e30-821e-d5eb27378cdb"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.588885 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-config" (OuterVolumeSpecName: "config") pod "99bc4bab-6083-4e30-821e-d5eb27378cdb" (UID: "99bc4bab-6083-4e30-821e-d5eb27378cdb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.595908 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "99bc4bab-6083-4e30-821e-d5eb27378cdb" (UID: "99bc4bab-6083-4e30-821e-d5eb27378cdb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.600378 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "99bc4bab-6083-4e30-821e-d5eb27378cdb" (UID: "99bc4bab-6083-4e30-821e-d5eb27378cdb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.625390 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.625427 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqv85\" (UniqueName: \"kubernetes.io/projected/99bc4bab-6083-4e30-821e-d5eb27378cdb-kube-api-access-jqv85\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.625437 4592 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.625446 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.625456 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:16 crc kubenswrapper[4592]: I0929 17:10:16.625465 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/99bc4bab-6083-4e30-821e-d5eb27378cdb-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.067820 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a","Type":"ContainerStarted","Data":"034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366"} Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.070077 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" 
event={"ID":"99bc4bab-6083-4e30-821e-d5eb27378cdb","Type":"ContainerDied","Data":"eca2eaca084cab205d7c81e5775e098384320da49f2e0dfa85ed9e22f90abf22"} Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.070116 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-lgsdp" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.070174 4592 scope.go:117] "RemoveContainer" containerID="ee047cabbecd5c7bd27e095d82049950922ffda4d45c206770c911b7a95c93fe" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.070187 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-jlk7t" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.070220 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-dvfxl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.095184 4592 scope.go:117] "RemoveContainer" containerID="7be71c0be23f3d5da498805286731d14b99e7b8cc7459c86773200a29cd2b245" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.141605 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-jlk7t"] Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.148594 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-jlk7t"] Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.195652 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99bc4bab-6083-4e30-821e-d5eb27378cdb" path="/var/lib/kubelet/pods/99bc4bab-6083-4e30-821e-d5eb27378cdb/volumes" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.285368 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6cbb8cd48-47ckj"] Sep 29 17:10:17 crc kubenswrapper[4592]: E0929 17:10:17.286073 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e678434-33e7-4c70-adff-88140eb9d3af" containerName="placement-db-sync" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.286133 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e678434-33e7-4c70-adff-88140eb9d3af" containerName="placement-db-sync" Sep 29 17:10:17 crc kubenswrapper[4592]: E0929 17:10:17.286203 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99bc4bab-6083-4e30-821e-d5eb27378cdb" containerName="init" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.286260 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="99bc4bab-6083-4e30-821e-d5eb27378cdb" containerName="init" Sep 29 17:10:17 crc kubenswrapper[4592]: E0929 17:10:17.286314 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c518ef27-73ac-4f0b-a896-44284fd049c5" containerName="keystone-bootstrap" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.286360 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="c518ef27-73ac-4f0b-a896-44284fd049c5" containerName="keystone-bootstrap" Sep 29 17:10:17 crc kubenswrapper[4592]: E0929 17:10:17.286418 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99bc4bab-6083-4e30-821e-d5eb27378cdb" containerName="dnsmasq-dns" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.286484 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="99bc4bab-6083-4e30-821e-d5eb27378cdb" containerName="dnsmasq-dns" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.286694 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="99bc4bab-6083-4e30-821e-d5eb27378cdb" 
containerName="dnsmasq-dns" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.286766 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="c518ef27-73ac-4f0b-a896-44284fd049c5" containerName="keystone-bootstrap" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.286827 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e678434-33e7-4c70-adff-88140eb9d3af" containerName="placement-db-sync" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.287691 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.295521 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.295745 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.295521 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.295922 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.296102 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-4wqkv" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.313979 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6cbb8cd48-47ckj"] Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.387904 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-584d46f4c7-tdlrl"] Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.389593 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.394566 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.394771 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.394661 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-wm4xb" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.394719 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.395623 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.395820 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.435296 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-584d46f4c7-tdlrl"] Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.437340 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f53c28fe-50d3-49b2-926e-fe4f166838ce-logs\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.437379 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-combined-ca-bundle\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.437399 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-scripts\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.437416 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-config-data\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.437434 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6l58\" (UniqueName: \"kubernetes.io/projected/f53c28fe-50d3-49b2-926e-fe4f166838ce-kube-api-access-t6l58\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.437493 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-internal-tls-certs\") pod \"placement-6cbb8cd48-47ckj\" (UID: 
\"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.437549 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-public-tls-certs\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.538745 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-public-tls-certs\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.538964 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-internal-tls-certs\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.539037 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-internal-tls-certs\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.539188 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-credential-keys\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.539283 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-combined-ca-bundle\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.539365 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-public-tls-certs\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.539439 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-fernet-keys\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.539533 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-config-data\") pod 
\"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.539606 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn8hf\" (UniqueName: \"kubernetes.io/projected/728e33a1-191b-4c9d-a2d2-e569433182ea-kube-api-access-fn8hf\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.539694 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f53c28fe-50d3-49b2-926e-fe4f166838ce-logs\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.539762 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-combined-ca-bundle\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.539829 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-scripts\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.539899 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-scripts\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.539967 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-config-data\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.540037 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6l58\" (UniqueName: \"kubernetes.io/projected/f53c28fe-50d3-49b2-926e-fe4f166838ce-kube-api-access-t6l58\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.541529 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f53c28fe-50d3-49b2-926e-fe4f166838ce-logs\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.546100 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-config-data\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc 
kubenswrapper[4592]: I0929 17:10:17.549132 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-scripts\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.549396 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-public-tls-certs\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.552200 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-combined-ca-bundle\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.556091 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f53c28fe-50d3-49b2-926e-fe4f166838ce-internal-tls-certs\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.556759 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6l58\" (UniqueName: \"kubernetes.io/projected/f53c28fe-50d3-49b2-926e-fe4f166838ce-kube-api-access-t6l58\") pod \"placement-6cbb8cd48-47ckj\" (UID: \"f53c28fe-50d3-49b2-926e-fe4f166838ce\") " pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.642008 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-public-tls-certs\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.642076 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-internal-tls-certs\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.642114 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-credential-keys\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.642129 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-combined-ca-bundle\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.642196 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"fernet-keys\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-fernet-keys\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.642217 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-config-data\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.642251 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn8hf\" (UniqueName: \"kubernetes.io/projected/728e33a1-191b-4c9d-a2d2-e569433182ea-kube-api-access-fn8hf\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.642293 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-scripts\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.643410 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.648504 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-internal-tls-certs\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.648739 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-config-data\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.649088 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-credential-keys\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.671612 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-public-tls-certs\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.672669 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-combined-ca-bundle\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.675047 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-fernet-keys\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.677914 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn8hf\" (UniqueName: \"kubernetes.io/projected/728e33a1-191b-4c9d-a2d2-e569433182ea-kube-api-access-fn8hf\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.682849 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/728e33a1-191b-4c9d-a2d2-e569433182ea-scripts\") pod \"keystone-584d46f4c7-tdlrl\" (UID: \"728e33a1-191b-4c9d-a2d2-e569433182ea\") " pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:17 crc kubenswrapper[4592]: I0929 17:10:17.722776 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:18 crc kubenswrapper[4592]: I0929 17:10:18.000395 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6cbb8cd48-47ckj"] Sep 29 17:10:18 crc kubenswrapper[4592]: I0929 17:10:18.099997 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6cbb8cd48-47ckj" event={"ID":"f53c28fe-50d3-49b2-926e-fe4f166838ce","Type":"ContainerStarted","Data":"528fea8de5f572b4be86570d7565e659e4d2860a2d0c6f4dc0f9a67e530d62c9"} Sep 29 17:10:18 crc kubenswrapper[4592]: I0929 17:10:18.367825 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-584d46f4c7-tdlrl"] Sep 29 17:10:18 crc kubenswrapper[4592]: W0929 17:10:18.405709 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod728e33a1_191b_4c9d_a2d2_e569433182ea.slice/crio-dee78fe33b727d5e3ccd936eba0b56e41f63efa40cbfcf1e8a76dea395784aad WatchSource:0}: Error finding container dee78fe33b727d5e3ccd936eba0b56e41f63efa40cbfcf1e8a76dea395784aad: Status 404 returned error can't find the container with id dee78fe33b727d5e3ccd936eba0b56e41f63efa40cbfcf1e8a76dea395784aad Sep 29 17:10:19 crc kubenswrapper[4592]: I0929 17:10:19.163605 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6cbb8cd48-47ckj" event={"ID":"f53c28fe-50d3-49b2-926e-fe4f166838ce","Type":"ContainerStarted","Data":"f0c1f946f4910b5d734d12f61fa23d0ba351b468757192110a42a5209a50b404"} Sep 29 17:10:19 crc kubenswrapper[4592]: I0929 17:10:19.163659 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:19 crc kubenswrapper[4592]: I0929 17:10:19.163671 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:19 crc kubenswrapper[4592]: I0929 17:10:19.163682 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6cbb8cd48-47ckj" event={"ID":"f53c28fe-50d3-49b2-926e-fe4f166838ce","Type":"ContainerStarted","Data":"ba7a833ece8eb44af2f918d7a2e264261a575e4d0c19f79f5e2c90a2a27a3baa"} Sep 29 17:10:19 crc kubenswrapper[4592]: I0929 17:10:19.169061 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-hgnnh" 
event={"ID":"4df9236d-2c26-4b89-acfc-d0de121eb93c","Type":"ContainerStarted","Data":"70eb1f280be325225e6184281858027bdcf55b861e6e8eddcd1312a382c946a4"} Sep 29 17:10:19 crc kubenswrapper[4592]: I0929 17:10:19.178077 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-584d46f4c7-tdlrl" event={"ID":"728e33a1-191b-4c9d-a2d2-e569433182ea","Type":"ContainerStarted","Data":"e48f9310bd524ed91c07ecf9aad799bdc0c87c001236ba15951942ba332a5aea"} Sep 29 17:10:19 crc kubenswrapper[4592]: I0929 17:10:19.178119 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-584d46f4c7-tdlrl" event={"ID":"728e33a1-191b-4c9d-a2d2-e569433182ea","Type":"ContainerStarted","Data":"dee78fe33b727d5e3ccd936eba0b56e41f63efa40cbfcf1e8a76dea395784aad"} Sep 29 17:10:19 crc kubenswrapper[4592]: I0929 17:10:19.178869 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:19 crc kubenswrapper[4592]: I0929 17:10:19.191684 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6cbb8cd48-47ckj" podStartSLOduration=2.191668576 podStartE2EDuration="2.191668576s" podCreationTimestamp="2025-09-29 17:10:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:19.190787112 +0000 UTC m=+1149.338564783" watchObservedRunningTime="2025-09-29 17:10:19.191668576 +0000 UTC m=+1149.339446257" Sep 29 17:10:19 crc kubenswrapper[4592]: I0929 17:10:19.237180 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-584d46f4c7-tdlrl" podStartSLOduration=2.237157756 podStartE2EDuration="2.237157756s" podCreationTimestamp="2025-09-29 17:10:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:19.215226425 +0000 UTC m=+1149.363004106" watchObservedRunningTime="2025-09-29 17:10:19.237157756 +0000 UTC m=+1149.384935437" Sep 29 17:10:19 crc kubenswrapper[4592]: I0929 17:10:19.247428 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-hgnnh" podStartSLOduration=3.069749063 podStartE2EDuration="53.247400403s" podCreationTimestamp="2025-09-29 17:09:26 +0000 UTC" firstStartedPulling="2025-09-29 17:09:27.708652037 +0000 UTC m=+1097.856429718" lastFinishedPulling="2025-09-29 17:10:17.886303377 +0000 UTC m=+1148.034081058" observedRunningTime="2025-09-29 17:10:19.238291088 +0000 UTC m=+1149.386068769" watchObservedRunningTime="2025-09-29 17:10:19.247400403 +0000 UTC m=+1149.395178094" Sep 29 17:10:20 crc kubenswrapper[4592]: I0929 17:10:20.188293 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-f2q9n" event={"ID":"8c80e2b1-f512-432e-87fe-c0ea60e6a546","Type":"ContainerStarted","Data":"8fd399cdc3e75e50cc234af8ce51516ffe384aed2b993c5d201c19bb2b45d1da"} Sep 29 17:10:20 crc kubenswrapper[4592]: I0929 17:10:20.213719 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-f2q9n" podStartSLOduration=4.05794544 podStartE2EDuration="55.213699444s" podCreationTimestamp="2025-09-29 17:09:25 +0000 UTC" firstStartedPulling="2025-09-29 17:09:27.517025914 +0000 UTC m=+1097.664803585" lastFinishedPulling="2025-09-29 17:10:18.672779908 +0000 UTC m=+1148.820557589" observedRunningTime="2025-09-29 17:10:20.203442877 +0000 UTC m=+1150.351220558" 
watchObservedRunningTime="2025-09-29 17:10:20.213699444 +0000 UTC m=+1150.361477125" Sep 29 17:10:22 crc kubenswrapper[4592]: I0929 17:10:22.211602 4592 generic.go:334] "Generic (PLEG): container finished" podID="f760ecfd-a454-4a77-89c3-0703ea63c515" containerID="3d153efe2d442d5cf9d5c2ea7ea15199eb598344fac9ac084fc12f46e2e1847a" exitCode=0 Sep 29 17:10:22 crc kubenswrapper[4592]: I0929 17:10:22.211693 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9gcq7" event={"ID":"f760ecfd-a454-4a77-89c3-0703ea63c515","Type":"ContainerDied","Data":"3d153efe2d442d5cf9d5c2ea7ea15199eb598344fac9ac084fc12f46e2e1847a"} Sep 29 17:10:24 crc kubenswrapper[4592]: I0929 17:10:24.238661 4592 generic.go:334] "Generic (PLEG): container finished" podID="4df9236d-2c26-4b89-acfc-d0de121eb93c" containerID="70eb1f280be325225e6184281858027bdcf55b861e6e8eddcd1312a382c946a4" exitCode=0 Sep 29 17:10:24 crc kubenswrapper[4592]: I0929 17:10:24.238749 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-hgnnh" event={"ID":"4df9236d-2c26-4b89-acfc-d0de121eb93c","Type":"ContainerDied","Data":"70eb1f280be325225e6184281858027bdcf55b861e6e8eddcd1312a382c946a4"} Sep 29 17:10:26 crc kubenswrapper[4592]: I0929 17:10:26.312669 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 17:10:26 crc kubenswrapper[4592]: I0929 17:10:26.481787 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-749bb4c784-lnncs" podUID="2d536771-b1ae-4daf-a9f1-1a86e2af88e8" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.666517 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-9gcq7" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.672884 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.843678 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-combined-ca-bundle\") pod \"f760ecfd-a454-4a77-89c3-0703ea63c515\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.843805 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-db-sync-config-data\") pod \"f760ecfd-a454-4a77-89c3-0703ea63c515\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.843834 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpmc4\" (UniqueName: \"kubernetes.io/projected/f760ecfd-a454-4a77-89c3-0703ea63c515-kube-api-access-jpmc4\") pod \"f760ecfd-a454-4a77-89c3-0703ea63c515\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.843928 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-config-data\") pod \"f760ecfd-a454-4a77-89c3-0703ea63c515\" (UID: \"f760ecfd-a454-4a77-89c3-0703ea63c515\") " Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.843952 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df9236d-2c26-4b89-acfc-d0de121eb93c-combined-ca-bundle\") pod \"4df9236d-2c26-4b89-acfc-d0de121eb93c\" (UID: \"4df9236d-2c26-4b89-acfc-d0de121eb93c\") " Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.843987 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnnvs\" (UniqueName: \"kubernetes.io/projected/4df9236d-2c26-4b89-acfc-d0de121eb93c-kube-api-access-cnnvs\") pod \"4df9236d-2c26-4b89-acfc-d0de121eb93c\" (UID: \"4df9236d-2c26-4b89-acfc-d0de121eb93c\") " Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.844061 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4df9236d-2c26-4b89-acfc-d0de121eb93c-db-sync-config-data\") pod \"4df9236d-2c26-4b89-acfc-d0de121eb93c\" (UID: \"4df9236d-2c26-4b89-acfc-d0de121eb93c\") " Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.851488 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f760ecfd-a454-4a77-89c3-0703ea63c515-kube-api-access-jpmc4" (OuterVolumeSpecName: "kube-api-access-jpmc4") pod "f760ecfd-a454-4a77-89c3-0703ea63c515" (UID: "f760ecfd-a454-4a77-89c3-0703ea63c515"). InnerVolumeSpecName "kube-api-access-jpmc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.854279 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4df9236d-2c26-4b89-acfc-d0de121eb93c-kube-api-access-cnnvs" (OuterVolumeSpecName: "kube-api-access-cnnvs") pod "4df9236d-2c26-4b89-acfc-d0de121eb93c" (UID: "4df9236d-2c26-4b89-acfc-d0de121eb93c"). InnerVolumeSpecName "kube-api-access-cnnvs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.855743 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f760ecfd-a454-4a77-89c3-0703ea63c515" (UID: "f760ecfd-a454-4a77-89c3-0703ea63c515"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.865331 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4df9236d-2c26-4b89-acfc-d0de121eb93c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4df9236d-2c26-4b89-acfc-d0de121eb93c" (UID: "4df9236d-2c26-4b89-acfc-d0de121eb93c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.891435 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f760ecfd-a454-4a77-89c3-0703ea63c515" (UID: "f760ecfd-a454-4a77-89c3-0703ea63c515"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.898784 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4df9236d-2c26-4b89-acfc-d0de121eb93c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4df9236d-2c26-4b89-acfc-d0de121eb93c" (UID: "4df9236d-2c26-4b89-acfc-d0de121eb93c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.920578 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-config-data" (OuterVolumeSpecName: "config-data") pod "f760ecfd-a454-4a77-89c3-0703ea63c515" (UID: "f760ecfd-a454-4a77-89c3-0703ea63c515"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.947263 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpmc4\" (UniqueName: \"kubernetes.io/projected/f760ecfd-a454-4a77-89c3-0703ea63c515-kube-api-access-jpmc4\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.947300 4592 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.947310 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.947320 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df9236d-2c26-4b89-acfc-d0de121eb93c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.947330 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnnvs\" (UniqueName: \"kubernetes.io/projected/4df9236d-2c26-4b89-acfc-d0de121eb93c-kube-api-access-cnnvs\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.947338 4592 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4df9236d-2c26-4b89-acfc-d0de121eb93c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:27 crc kubenswrapper[4592]: I0929 17:10:27.947347 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f760ecfd-a454-4a77-89c3-0703ea63c515-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.308902 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-hgnnh" event={"ID":"4df9236d-2c26-4b89-acfc-d0de121eb93c","Type":"ContainerDied","Data":"2173c2273efa77e137922e163940a6ddcd11784af16c0a7bd4a96a65b0bc7db1"} Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.308951 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2173c2273efa77e137922e163940a6ddcd11784af16c0a7bd4a96a65b0bc7db1" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.308923 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-hgnnh" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.310593 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-9gcq7" event={"ID":"f760ecfd-a454-4a77-89c3-0703ea63c515","Type":"ContainerDied","Data":"ac6d13fcf1163dd7a318e1c5e677496296390cc9c653a916325193838c81da48"} Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.310629 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac6d13fcf1163dd7a318e1c5e677496296390cc9c653a916325193838c81da48" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.310670 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-9gcq7" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.986927 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-78c8db585f-2qfs6"] Sep 29 17:10:28 crc kubenswrapper[4592]: E0929 17:10:28.987639 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f760ecfd-a454-4a77-89c3-0703ea63c515" containerName="glance-db-sync" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.987656 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="f760ecfd-a454-4a77-89c3-0703ea63c515" containerName="glance-db-sync" Sep 29 17:10:28 crc kubenswrapper[4592]: E0929 17:10:28.987708 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4df9236d-2c26-4b89-acfc-d0de121eb93c" containerName="barbican-db-sync" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.987716 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4df9236d-2c26-4b89-acfc-d0de121eb93c" containerName="barbican-db-sync" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.987920 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="f760ecfd-a454-4a77-89c3-0703ea63c515" containerName="glance-db-sync" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.987946 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="4df9236d-2c26-4b89-acfc-d0de121eb93c" containerName="barbican-db-sync" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.989116 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.994132 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.994561 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-qvw9b" Sep 29 17:10:28 crc kubenswrapper[4592]: I0929 17:10:28.996660 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.013892 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6fdf4f774d-zgzql"] Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.015714 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.032353 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.039972 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-78c8db585f-2qfs6"] Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.077828 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6fdf4f774d-zgzql"] Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.085093 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda783a8-49d1-48be-9b21-695b1a673b1a-combined-ca-bundle\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.085182 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grwsp\" (UniqueName: \"kubernetes.io/projected/bda783a8-49d1-48be-9b21-695b1a673b1a-kube-api-access-grwsp\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.085217 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bda783a8-49d1-48be-9b21-695b1a673b1a-logs\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.085323 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bda783a8-49d1-48be-9b21-695b1a673b1a-config-data-custom\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.085404 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bda783a8-49d1-48be-9b21-695b1a673b1a-config-data\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.173889 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-54c88b6959-kzs9k"] Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.184720 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.189694 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd181b6d-4f45-415c-8038-4bf077b0a747-logs\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.189731 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvrcq\" (UniqueName: \"kubernetes.io/projected/cd181b6d-4f45-415c-8038-4bf077b0a747-kube-api-access-lvrcq\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.189759 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd181b6d-4f45-415c-8038-4bf077b0a747-config-data\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.189816 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bda783a8-49d1-48be-9b21-695b1a673b1a-config-data\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.189856 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda783a8-49d1-48be-9b21-695b1a673b1a-combined-ca-bundle\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.189890 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grwsp\" (UniqueName: \"kubernetes.io/projected/bda783a8-49d1-48be-9b21-695b1a673b1a-kube-api-access-grwsp\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.189909 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bda783a8-49d1-48be-9b21-695b1a673b1a-logs\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.189932 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd181b6d-4f45-415c-8038-4bf077b0a747-combined-ca-bundle\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.189986 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd181b6d-4f45-415c-8038-4bf077b0a747-config-data-custom\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.190020 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bda783a8-49d1-48be-9b21-695b1a673b1a-config-data-custom\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.195256 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bda783a8-49d1-48be-9b21-695b1a673b1a-config-data-custom\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.196876 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bda783a8-49d1-48be-9b21-695b1a673b1a-logs\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: E0929 17:10:29.223389 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.228119 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54c88b6959-kzs9k"] Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.234190 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bda783a8-49d1-48be-9b21-695b1a673b1a-config-data\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.231795 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda783a8-49d1-48be-9b21-695b1a673b1a-combined-ca-bundle\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.256820 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grwsp\" (UniqueName: \"kubernetes.io/projected/bda783a8-49d1-48be-9b21-695b1a673b1a-kube-api-access-grwsp\") pod \"barbican-worker-78c8db585f-2qfs6\" (UID: \"bda783a8-49d1-48be-9b21-695b1a673b1a\") " pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.291544 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7stjq\" (UniqueName: \"kubernetes.io/projected/c0593a9d-9915-41ac-8439-3c3eecf84e00-kube-api-access-7stjq\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " 
pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.296194 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-sb\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.296403 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-dns-svc\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.296596 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd181b6d-4f45-415c-8038-4bf077b0a747-combined-ca-bundle\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.296719 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-dns-swift-storage-0\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.296864 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd181b6d-4f45-415c-8038-4bf077b0a747-config-data-custom\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.296986 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-config\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.297088 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-nb\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.297368 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd181b6d-4f45-415c-8038-4bf077b0a747-logs\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.297460 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvrcq\" (UniqueName: 
\"kubernetes.io/projected/cd181b6d-4f45-415c-8038-4bf077b0a747-kube-api-access-lvrcq\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.297543 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd181b6d-4f45-415c-8038-4bf077b0a747-config-data\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.300028 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd181b6d-4f45-415c-8038-4bf077b0a747-logs\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.311241 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-78c8db585f-2qfs6" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.312545 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd181b6d-4f45-415c-8038-4bf077b0a747-config-data-custom\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.316957 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd181b6d-4f45-415c-8038-4bf077b0a747-combined-ca-bundle\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.323698 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd181b6d-4f45-415c-8038-4bf077b0a747-config-data\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.339734 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvrcq\" (UniqueName: \"kubernetes.io/projected/cd181b6d-4f45-415c-8038-4bf077b0a747-kube-api-access-lvrcq\") pod \"barbican-keystone-listener-6fdf4f774d-zgzql\" (UID: \"cd181b6d-4f45-415c-8038-4bf077b0a747\") " pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.382971 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.401567 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-config\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.401625 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-nb\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.401686 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7stjq\" (UniqueName: \"kubernetes.io/projected/c0593a9d-9915-41ac-8439-3c3eecf84e00-kube-api-access-7stjq\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.401714 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-sb\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.404242 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-nb\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.405036 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-config\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.407908 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-sb\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.408711 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a","Type":"ContainerStarted","Data":"9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8"} Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.408968 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="ceilometer-notification-agent" containerID="cri-o://e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c" gracePeriod=30 Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.409366 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/ceilometer-0" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.409786 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="proxy-httpd" containerID="cri-o://9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8" gracePeriod=30 Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.409866 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="sg-core" containerID="cri-o://034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366" gracePeriod=30 Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.429283 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-dns-svc\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.429500 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-dns-swift-storage-0\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.430551 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-dns-swift-storage-0\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.431212 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-dns-svc\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.457652 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7stjq\" (UniqueName: \"kubernetes.io/projected/c0593a9d-9915-41ac-8439-3c3eecf84e00-kube-api-access-7stjq\") pod \"dnsmasq-dns-54c88b6959-kzs9k\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.546376 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7d4fd79db4-cp892"] Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.548234 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.554694 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7d4fd79db4-cp892"] Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.559835 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.599815 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.636065 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcsdk\" (UniqueName: \"kubernetes.io/projected/b766500c-aa92-44f5-9a9a-aa581878fc5c-kube-api-access-pcsdk\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.636115 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-config-data\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.636211 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-combined-ca-bundle\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.636236 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b766500c-aa92-44f5-9a9a-aa581878fc5c-logs\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.636269 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-config-data-custom\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.636402 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54c88b6959-kzs9k"] Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.676136 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-hjm6g"] Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.677754 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.685448 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-hjm6g"] Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.739556 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.739658 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-config\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.739689 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.739724 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcsdk\" (UniqueName: \"kubernetes.io/projected/b766500c-aa92-44f5-9a9a-aa581878fc5c-kube-api-access-pcsdk\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.739769 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-config-data\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.741256 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qwkx\" (UniqueName: \"kubernetes.io/projected/8974254d-64b0-41eb-978a-d2fb9988c8ce-kube-api-access-5qwkx\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.741336 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-dns-svc\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.741427 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-combined-ca-bundle\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.741457 4592 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.741495 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b766500c-aa92-44f5-9a9a-aa581878fc5c-logs\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.741569 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-config-data-custom\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.744821 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b766500c-aa92-44f5-9a9a-aa581878fc5c-logs\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.760944 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-combined-ca-bundle\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.762013 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-config-data\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.776715 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-config-data-custom\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.791136 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcsdk\" (UniqueName: \"kubernetes.io/projected/b766500c-aa92-44f5-9a9a-aa581878fc5c-kube-api-access-pcsdk\") pod \"barbican-api-7d4fd79db4-cp892\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.850423 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.850464 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-config\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.850489 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.850534 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qwkx\" (UniqueName: \"kubernetes.io/projected/8974254d-64b0-41eb-978a-d2fb9988c8ce-kube-api-access-5qwkx\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.850562 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-dns-svc\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.850595 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.851580 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-config\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.852080 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.852274 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.853464 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.853776 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-dns-svc\") pod 
\"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:29 crc kubenswrapper[4592]: I0929 17:10:29.888225 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qwkx\" (UniqueName: \"kubernetes.io/projected/8974254d-64b0-41eb-978a-d2fb9988c8ce-kube-api-access-5qwkx\") pod \"dnsmasq-dns-688c87cc99-hjm6g\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.074289 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.100018 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.444859 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.447221 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.455535 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.455841 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-f6zww" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.455939 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.463049 4592 generic.go:334] "Generic (PLEG): container finished" podID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerID="034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366" exitCode=2 Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.463100 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a","Type":"ContainerDied","Data":"034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366"} Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.472662 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-78c8db585f-2qfs6"] Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.495957 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.518056 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6fdf4f774d-zgzql"] Sep 29 17:10:30 crc kubenswrapper[4592]: W0929 17:10:30.550942 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd181b6d_4f45_415c_8038_4bf077b0a747.slice/crio-097371cd960268b19672e290f4a9428418f91514902c5277d63e3ffa5afb7965 WatchSource:0}: Error finding container 097371cd960268b19672e290f4a9428418f91514902c5277d63e3ffa5afb7965: Status 404 returned error can't find the container with id 097371cd960268b19672e290f4a9428418f91514902c5277d63e3ffa5afb7965 Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.569744 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54c88b6959-kzs9k"] Sep 29 17:10:30 crc 
kubenswrapper[4592]: I0929 17:10:30.595837 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.595878 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3049408b-5861-4e80-b828-aabbd56d77f7-logs\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.595899 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-scripts\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.595934 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.595955 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-config-data\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.595995 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3049408b-5861-4e80-b828-aabbd56d77f7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.596297 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5j6v\" (UniqueName: \"kubernetes.io/projected/3049408b-5861-4e80-b828-aabbd56d77f7-kube-api-access-k5j6v\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.698313 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.698372 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-config-data\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 
crc kubenswrapper[4592]: I0929 17:10:30.698420 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3049408b-5861-4e80-b828-aabbd56d77f7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.698503 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5j6v\" (UniqueName: \"kubernetes.io/projected/3049408b-5861-4e80-b828-aabbd56d77f7-kube-api-access-k5j6v\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.698526 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.698550 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3049408b-5861-4e80-b828-aabbd56d77f7-logs\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.698567 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-scripts\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.699489 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.699626 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3049408b-5861-4e80-b828-aabbd56d77f7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.706589 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3049408b-5861-4e80-b828-aabbd56d77f7-logs\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.724049 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.741878 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-scripts\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.743979 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.747433 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5j6v\" (UniqueName: \"kubernetes.io/projected/3049408b-5861-4e80-b828-aabbd56d77f7-kube-api-access-k5j6v\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.748386 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-config-data\") pod \"glance-default-external-api-0\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.786629 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.850238 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.856123 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.866608 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.927284 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:10:30 crc kubenswrapper[4592]: I0929 17:10:30.935359 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7d4fd79db4-cp892"] Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.012528 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.012604 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.012662 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fa653449-3b0a-461b-8289-3fe84654e411-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.012687 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.012714 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa653449-3b0a-461b-8289-3fe84654e411-logs\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.012751 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwh4m\" (UniqueName: \"kubernetes.io/projected/fa653449-3b0a-461b-8289-3fe84654e411-kube-api-access-rwh4m\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.012824 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.061511 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-hjm6g"] 
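[editor's note, not part of the captured journal] The entries above trace the kubelet volume reconciler's normal sequence for each pod: "operationExecutor.VerifyControllerAttachedVolume started" when the desired-state-of-world is checked, "operationExecutor.MountVolume started" when the mount operation is queued, and "MountVolume.SetUp succeeded" (or "MountVolume.MountDevice succeeded" for the local PVs such as local-storage11-crc) when it completes. A minimal sketch for tallying those events per pod from a capture like this one is below; the regexes are assumptions inferred from the klog/syslog text visible here, and the lookahead exists because this capture joins several journal entries onto one physical line.

#!/usr/bin/env python3
# Sketch only -- not part of the captured log. Tallies kubelet volume-mount
# events per pod from kubenswrapper journal text like the capture above.
# Field layout is assumed from this capture; other kubelet versions may
# format entries differently.
import re
import sys
from collections import defaultdict

# One klog entry: 'kubenswrapper[4592]: I0929 17:10:29.850489 4592 <body>'.
# The lookahead stops <body> at the next syslog timestamp ("Sep 29 17:10:29"),
# so it also copes with captures where several entries share a physical line.
ENTRY_RE = re.compile(
    r'kubenswrapper\[\d+\]: [IWE]\d{4} \d{2}:\d{2}:\d{2}\.\d+ +\d+ +'
    r'(?P<body>.*?)(?= [A-Z][a-z]{2} +\d+ \d{2}:\d{2}:\d{2} |\n|$)')
POD_RE = re.compile(r'pod="(?P<pod>[^"]+)"')

def mount_counts(text):
    """Map pod -> [mounts started, mounts set up] from raw journal text."""
    counts = defaultdict(lambda: [0, 0])
    for entry in ENTRY_RE.finditer(text):
        body = entry.group('body')
        pod = POD_RE.search(body)
        if not pod:
            continue  # UnmountVolume lines etc. carry no pod="..." field
        if 'operationExecutor.MountVolume started' in body:
            counts[pod.group('pod')][0] += 1
        elif 'MountVolume.SetUp succeeded' in body:
            counts[pod.group('pod')][1] += 1
    return counts

if __name__ == '__main__':
    for pod, (started, done) in sorted(mount_counts(sys.stdin.read()).items()):
        print(f'{pod}: {started} started, {done} set up')

Run as, e.g., `python3 mount_counts.py < kubelet.log`. On the entries above it would report seven started/seven set up for openstack/glance-default-external-api-0; the same entry-splitting approach extends to the "SyncLoop (PLEG)" and "SyncLoop (probe)" events later in this capture.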
Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.116182 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.116228 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.116268 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fa653449-3b0a-461b-8289-3fe84654e411-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.116285 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.116304 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa653449-3b0a-461b-8289-3fe84654e411-logs\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.116331 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwh4m\" (UniqueName: \"kubernetes.io/projected/fa653449-3b0a-461b-8289-3fe84654e411-kube-api-access-rwh4m\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.116369 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.131851 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fa653449-3b0a-461b-8289-3fe84654e411-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.136701 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa653449-3b0a-461b-8289-3fe84654e411-logs\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.139344 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume 
\"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.144515 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.145738 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.157578 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.194332 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwh4m\" (UniqueName: \"kubernetes.io/projected/fa653449-3b0a-461b-8289-3fe84654e411-kube-api-access-rwh4m\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.246399 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.503617 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.569780 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" event={"ID":"c0593a9d-9915-41ac-8439-3c3eecf84e00","Type":"ContainerStarted","Data":"1c6bbb33a7ff9d133ed4dcb0c22232cad41e1f1ce17c2513d7cfb6e01ce6e0b2"} Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.623370 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" event={"ID":"cd181b6d-4f45-415c-8038-4bf077b0a747","Type":"ContainerStarted","Data":"097371cd960268b19672e290f4a9428418f91514902c5277d63e3ffa5afb7965"} Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.663940 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-78c8db585f-2qfs6" event={"ID":"bda783a8-49d1-48be-9b21-695b1a673b1a","Type":"ContainerStarted","Data":"8306968dbcb30b5744d0486f7c05d69f4370bd38e0eb4b53181c3ff8b5fd0026"} Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.665367 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" event={"ID":"8974254d-64b0-41eb-978a-d2fb9988c8ce","Type":"ContainerStarted","Data":"3c3f0bd2d9bc5c9cece2165a244d270bcc2fb8377eda3b97d60b49ca631a7a82"} Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.667570 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d4fd79db4-cp892" event={"ID":"b766500c-aa92-44f5-9a9a-aa581878fc5c","Type":"ContainerStarted","Data":"fc042eec41e95892258cd9182d0b12c8f76e1b3b040373a7b2226c79ab8590ab"} Sep 29 17:10:31 crc kubenswrapper[4592]: I0929 17:10:31.859399 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:10:31 crc kubenswrapper[4592]: W0929 17:10:31.896611 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3049408b_5861_4e80_b828_aabbd56d77f7.slice/crio-5c91995a6d43c3cadd547043ee0442ac93b28943a979b0200bd860f99256adb5 WatchSource:0}: Error finding container 5c91995a6d43c3cadd547043ee0442ac93b28943a979b0200bd860f99256adb5: Status 404 returned error can't find the container with id 5c91995a6d43c3cadd547043ee0442ac93b28943a979b0200bd860f99256adb5 Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.419682 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.688305 4592 generic.go:334] "Generic (PLEG): container finished" podID="8974254d-64b0-41eb-978a-d2fb9988c8ce" containerID="701cc56514e9f66e76d42bb535664dda218be3bf4532114d21e82d5e4d1f0f3b" exitCode=0 Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.688467 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" event={"ID":"8974254d-64b0-41eb-978a-d2fb9988c8ce","Type":"ContainerDied","Data":"701cc56514e9f66e76d42bb535664dda218be3bf4532114d21e82d5e4d1f0f3b"} Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.701969 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fa653449-3b0a-461b-8289-3fe84654e411","Type":"ContainerStarted","Data":"07771398b5eebf2af10287c46d9f7936d91160d4d8048f091c66b015078642fa"} Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.717401 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-api-7d4fd79db4-cp892" event={"ID":"b766500c-aa92-44f5-9a9a-aa581878fc5c","Type":"ContainerStarted","Data":"94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472"} Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.717449 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d4fd79db4-cp892" event={"ID":"b766500c-aa92-44f5-9a9a-aa581878fc5c","Type":"ContainerStarted","Data":"b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c"} Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.718577 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.718605 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.746927 4592 generic.go:334] "Generic (PLEG): container finished" podID="c0593a9d-9915-41ac-8439-3c3eecf84e00" containerID="90d81db8946c75488f87c17dfffa8f0419b79911c2c75a2fcb7744391cd541b7" exitCode=0 Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.746997 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" event={"ID":"c0593a9d-9915-41ac-8439-3c3eecf84e00","Type":"ContainerDied","Data":"90d81db8946c75488f87c17dfffa8f0419b79911c2c75a2fcb7744391cd541b7"} Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.747782 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7d4fd79db4-cp892" podStartSLOduration=3.747762846 podStartE2EDuration="3.747762846s" podCreationTimestamp="2025-09-29 17:10:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:32.74540913 +0000 UTC m=+1162.893186801" watchObservedRunningTime="2025-09-29 17:10:32.747762846 +0000 UTC m=+1162.895540527" Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.758526 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3049408b-5861-4e80-b828-aabbd56d77f7","Type":"ContainerStarted","Data":"5c91995a6d43c3cadd547043ee0442ac93b28943a979b0200bd860f99256adb5"} Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.813300 4592 generic.go:334] "Generic (PLEG): container finished" podID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerID="e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c" exitCode=0 Sep 29 17:10:32 crc kubenswrapper[4592]: I0929 17:10:32.813362 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a","Type":"ContainerDied","Data":"e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c"} Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.468179 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.521555 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-nb\") pod \"c0593a9d-9915-41ac-8439-3c3eecf84e00\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.521660 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-config\") pod \"c0593a9d-9915-41ac-8439-3c3eecf84e00\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.521764 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-dns-svc\") pod \"c0593a9d-9915-41ac-8439-3c3eecf84e00\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.521789 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-dns-swift-storage-0\") pod \"c0593a9d-9915-41ac-8439-3c3eecf84e00\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.521816 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7stjq\" (UniqueName: \"kubernetes.io/projected/c0593a9d-9915-41ac-8439-3c3eecf84e00-kube-api-access-7stjq\") pod \"c0593a9d-9915-41ac-8439-3c3eecf84e00\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.521830 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-sb\") pod \"c0593a9d-9915-41ac-8439-3c3eecf84e00\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.558327 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0593a9d-9915-41ac-8439-3c3eecf84e00-kube-api-access-7stjq" (OuterVolumeSpecName: "kube-api-access-7stjq") pod "c0593a9d-9915-41ac-8439-3c3eecf84e00" (UID: "c0593a9d-9915-41ac-8439-3c3eecf84e00"). InnerVolumeSpecName "kube-api-access-7stjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.582332 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c0593a9d-9915-41ac-8439-3c3eecf84e00" (UID: "c0593a9d-9915-41ac-8439-3c3eecf84e00"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.624260 4592 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.624299 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7stjq\" (UniqueName: \"kubernetes.io/projected/c0593a9d-9915-41ac-8439-3c3eecf84e00-kube-api-access-7stjq\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.650670 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-config" (OuterVolumeSpecName: "config") pod "c0593a9d-9915-41ac-8439-3c3eecf84e00" (UID: "c0593a9d-9915-41ac-8439-3c3eecf84e00"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.673743 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c0593a9d-9915-41ac-8439-3c3eecf84e00" (UID: "c0593a9d-9915-41ac-8439-3c3eecf84e00"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:33 crc kubenswrapper[4592]: E0929 17:10:33.679476 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-sb podName:c0593a9d-9915-41ac-8439-3c3eecf84e00 nodeName:}" failed. No retries permitted until 2025-09-29 17:10:34.179450841 +0000 UTC m=+1164.327228522 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ovsdbserver-sb" (UniqueName: "kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-sb") pod "c0593a9d-9915-41ac-8439-3c3eecf84e00" (UID: "c0593a9d-9915-41ac-8439-3c3eecf84e00") : error deleting /var/lib/kubelet/pods/c0593a9d-9915-41ac-8439-3c3eecf84e00/volume-subpaths: remove /var/lib/kubelet/pods/c0593a9d-9915-41ac-8439-3c3eecf84e00/volume-subpaths: no such file or directory Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.680187 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c0593a9d-9915-41ac-8439-3c3eecf84e00" (UID: "c0593a9d-9915-41ac-8439-3c3eecf84e00"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.727872 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.727901 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.727911 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.841710 4592 generic.go:334] "Generic (PLEG): container finished" podID="8c80e2b1-f512-432e-87fe-c0ea60e6a546" containerID="8fd399cdc3e75e50cc234af8ce51516ffe384aed2b993c5d201c19bb2b45d1da" exitCode=0 Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.841778 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-f2q9n" event={"ID":"8c80e2b1-f512-432e-87fe-c0ea60e6a546","Type":"ContainerDied","Data":"8fd399cdc3e75e50cc234af8ce51516ffe384aed2b993c5d201c19bb2b45d1da"} Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.844108 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3049408b-5861-4e80-b828-aabbd56d77f7","Type":"ContainerStarted","Data":"240166c6b8583cdd734609a5f8ed0cebdf3c6bd6f0c18cc37e51b2b73264b840"} Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.847047 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" event={"ID":"8974254d-64b0-41eb-978a-d2fb9988c8ce","Type":"ContainerStarted","Data":"d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30"} Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.847204 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.849942 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fa653449-3b0a-461b-8289-3fe84654e411","Type":"ContainerStarted","Data":"18c131fe3515379b2daab8552811941330ba63e9131b234abcda01f8ae939415"} Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.868959 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.869107 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54c88b6959-kzs9k" event={"ID":"c0593a9d-9915-41ac-8439-3c3eecf84e00","Type":"ContainerDied","Data":"1c6bbb33a7ff9d133ed4dcb0c22232cad41e1f1ce17c2513d7cfb6e01ce6e0b2"} Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.869132 4592 scope.go:117] "RemoveContainer" containerID="90d81db8946c75488f87c17dfffa8f0419b79911c2c75a2fcb7744391cd541b7" Sep 29 17:10:33 crc kubenswrapper[4592]: I0929 17:10:33.884088 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" podStartSLOduration=4.884076515 podStartE2EDuration="4.884076515s" podCreationTimestamp="2025-09-29 17:10:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:33.882283295 +0000 UTC m=+1164.030060986" watchObservedRunningTime="2025-09-29 17:10:33.884076515 +0000 UTC m=+1164.031854196" Sep 29 17:10:34 crc kubenswrapper[4592]: I0929 17:10:34.238251 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-sb\") pod \"c0593a9d-9915-41ac-8439-3c3eecf84e00\" (UID: \"c0593a9d-9915-41ac-8439-3c3eecf84e00\") " Sep 29 17:10:34 crc kubenswrapper[4592]: I0929 17:10:34.239376 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c0593a9d-9915-41ac-8439-3c3eecf84e00" (UID: "c0593a9d-9915-41ac-8439-3c3eecf84e00"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:34 crc kubenswrapper[4592]: I0929 17:10:34.326712 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:34 crc kubenswrapper[4592]: I0929 17:10:34.343162 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c0593a9d-9915-41ac-8439-3c3eecf84e00-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:34 crc kubenswrapper[4592]: I0929 17:10:34.539363 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54c88b6959-kzs9k"] Sep 29 17:10:34 crc kubenswrapper[4592]: I0929 17:10:34.549652 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-54c88b6959-kzs9k"] Sep 29 17:10:34 crc kubenswrapper[4592]: I0929 17:10:34.884698 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3049408b-5861-4e80-b828-aabbd56d77f7","Type":"ContainerStarted","Data":"5489585c8e97bf5e78ec6a967bfcd04f4d742cd7ab83a534e5e9c5d8eff1def3"} Sep 29 17:10:34 crc kubenswrapper[4592]: I0929 17:10:34.890942 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fa653449-3b0a-461b-8289-3fe84654e411","Type":"ContainerStarted","Data":"9883de3f3b2d98862b2ff02d47096a2aab2045c7f0c77cb1f1c459fd5fa7090f"} Sep 29 17:10:34 crc kubenswrapper[4592]: I0929 17:10:34.952331 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.952309472 podStartE2EDuration="5.952309472s" podCreationTimestamp="2025-09-29 17:10:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:34.921360438 +0000 UTC m=+1165.069138119" watchObservedRunningTime="2025-09-29 17:10:34.952309472 +0000 UTC m=+1165.100087153" Sep 29 17:10:34 crc kubenswrapper[4592]: I0929 17:10:34.965699 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.9656826259999995 podStartE2EDuration="5.965682626s" podCreationTimestamp="2025-09-29 17:10:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:34.947262731 +0000 UTC m=+1165.095040412" watchObservedRunningTime="2025-09-29 17:10:34.965682626 +0000 UTC m=+1165.113460307" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.206523 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0593a9d-9915-41ac-8439-3c3eecf84e00" path="/var/lib/kubelet/pods/c0593a9d-9915-41ac-8439-3c3eecf84e00/volumes" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.207455 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.291976 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.389779 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.564747 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h644t\" (UniqueName: \"kubernetes.io/projected/8c80e2b1-f512-432e-87fe-c0ea60e6a546-kube-api-access-h644t\") pod \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.564816 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8c80e2b1-f512-432e-87fe-c0ea60e6a546-etc-machine-id\") pod \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.564846 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-config-data\") pod \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.564914 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-scripts\") pod \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.564996 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-db-sync-config-data\") pod \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.565094 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-combined-ca-bundle\") pod \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\" (UID: \"8c80e2b1-f512-432e-87fe-c0ea60e6a546\") " Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.566052 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8c80e2b1-f512-432e-87fe-c0ea60e6a546-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8c80e2b1-f512-432e-87fe-c0ea60e6a546" (UID: "8c80e2b1-f512-432e-87fe-c0ea60e6a546"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.566496 4592 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8c80e2b1-f512-432e-87fe-c0ea60e6a546-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.571234 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "8c80e2b1-f512-432e-87fe-c0ea60e6a546" (UID: "8c80e2b1-f512-432e-87fe-c0ea60e6a546"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.571234 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c80e2b1-f512-432e-87fe-c0ea60e6a546-kube-api-access-h644t" (OuterVolumeSpecName: "kube-api-access-h644t") pod "8c80e2b1-f512-432e-87fe-c0ea60e6a546" (UID: "8c80e2b1-f512-432e-87fe-c0ea60e6a546"). InnerVolumeSpecName "kube-api-access-h644t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.571961 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-scripts" (OuterVolumeSpecName: "scripts") pod "8c80e2b1-f512-432e-87fe-c0ea60e6a546" (UID: "8c80e2b1-f512-432e-87fe-c0ea60e6a546"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.610037 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c80e2b1-f512-432e-87fe-c0ea60e6a546" (UID: "8c80e2b1-f512-432e-87fe-c0ea60e6a546"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.637550 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-config-data" (OuterVolumeSpecName: "config-data") pod "8c80e2b1-f512-432e-87fe-c0ea60e6a546" (UID: "8c80e2b1-f512-432e-87fe-c0ea60e6a546"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.668501 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.668551 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h644t\" (UniqueName: \"kubernetes.io/projected/8c80e2b1-f512-432e-87fe-c0ea60e6a546-kube-api-access-h644t\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.668568 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.668579 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.668590 4592 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8c80e2b1-f512-432e-87fe-c0ea60e6a546-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.898749 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-f2q9n" event={"ID":"8c80e2b1-f512-432e-87fe-c0ea60e6a546","Type":"ContainerDied","Data":"e76f98eb69826a4fcc35def68622085245e6d134be8a7cfa2a3e60bc4c7836d5"} Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.898800 4592 pod_container_deletor.go:80] 
"Container not found in pod's containers" containerID="e76f98eb69826a4fcc35def68622085245e6d134be8a7cfa2a3e60bc4c7836d5" Sep 29 17:10:35 crc kubenswrapper[4592]: I0929 17:10:35.898814 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-f2q9n" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.171818 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 17:10:36 crc kubenswrapper[4592]: E0929 17:10:36.179727 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c80e2b1-f512-432e-87fe-c0ea60e6a546" containerName="cinder-db-sync" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.179764 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c80e2b1-f512-432e-87fe-c0ea60e6a546" containerName="cinder-db-sync" Sep 29 17:10:36 crc kubenswrapper[4592]: E0929 17:10:36.179779 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0593a9d-9915-41ac-8439-3c3eecf84e00" containerName="init" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.179785 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0593a9d-9915-41ac-8439-3c3eecf84e00" containerName="init" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.179992 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0593a9d-9915-41ac-8439-3c3eecf84e00" containerName="init" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.180008 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c80e2b1-f512-432e-87fe-c0ea60e6a546" containerName="cinder-db-sync" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.180976 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.183992 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.184356 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.184508 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.184628 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-vvbts" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.202509 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.287063 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-scripts\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.287139 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d84c3f5-9c0f-473b-a83c-53651dc03ece-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.287214 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.287235 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.287268 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-config-data\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.287297 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qr6c\" (UniqueName: \"kubernetes.io/projected/0d84c3f5-9c0f-473b-a83c-53651dc03ece-kube-api-access-2qr6c\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.288333 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-hjm6g"] Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.288592 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" podUID="8974254d-64b0-41eb-978a-d2fb9988c8ce" containerName="dnsmasq-dns" containerID="cri-o://d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30" gracePeriod=10 Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.330687 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.330749 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.337335 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"5cfe4146468dedf86aeb20915c49e1cd273dffa7a3e1db160e17e0a28afe71ab"} pod="openstack/horizon-c9567f99b-8nh47" containerMessage="Container horizon failed startup probe, will be restarted" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.337393 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" containerID="cri-o://5cfe4146468dedf86aeb20915c49e1cd273dffa7a3e1db160e17e0a28afe71ab" gracePeriod=30 Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.367229 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-94fbn"] Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.368688 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.391021 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-scripts\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.391076 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d84c3f5-9c0f-473b-a83c-53651dc03ece-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.391129 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.391166 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.391202 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-config-data\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.391235 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qr6c\" (UniqueName: \"kubernetes.io/projected/0d84c3f5-9c0f-473b-a83c-53651dc03ece-kube-api-access-2qr6c\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.398631 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-94fbn"] Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.401726 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-scripts\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.408441 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d84c3f5-9c0f-473b-a83c-53651dc03ece-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.413377 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc 
kubenswrapper[4592]: I0929 17:10:36.421585 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-config-data\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.437724 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qr6c\" (UniqueName: \"kubernetes.io/projected/0d84c3f5-9c0f-473b-a83c-53651dc03ece-kube-api-access-2qr6c\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.442322 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.485204 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-749bb4c784-lnncs" podUID="2d536771-b1ae-4daf-a9f1-1a86e2af88e8" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.485806 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.485909 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.487547 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"5866d8efe686127c7c7b10f8621f579880df1ada3ef7a4bb255d56617124a27d"} pod="openstack/horizon-749bb4c784-lnncs" containerMessage="Container horizon failed startup probe, will be restarted" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.487596 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-749bb4c784-lnncs" podUID="2d536771-b1ae-4daf-a9f1-1a86e2af88e8" containerName="horizon" containerID="cri-o://5866d8efe686127c7c7b10f8621f579880df1ada3ef7a4bb255d56617124a27d" gracePeriod=30 Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.512175 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.512253 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.512345 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.512390 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzb9k\" (UniqueName: \"kubernetes.io/projected/6ddab092-5ab2-420d-8d8f-30ce7633185d-kube-api-access-zzb9k\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.512518 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.512735 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-config\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.525355 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.533535 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.541543 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.546168 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.616026 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltkp9\" (UniqueName: \"kubernetes.io/projected/08289c5c-740a-4280-85c0-60ed62c01b4f-kube-api-access-ltkp9\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.616077 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-scripts\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.616106 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.616126 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " 
pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.619167 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.619762 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.621987 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-config-data-custom\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.623079 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzb9k\" (UniqueName: \"kubernetes.io/projected/6ddab092-5ab2-420d-8d8f-30ce7633185d-kube-api-access-zzb9k\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.624310 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08289c5c-740a-4280-85c0-60ed62c01b4f-logs\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.624377 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-config-data\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.624451 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08289c5c-740a-4280-85c0-60ed62c01b4f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.624625 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.626241 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.626419 4592 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-config\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.626534 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.627291 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.628183 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-config\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.634677 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.650514 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzb9k\" (UniqueName: \"kubernetes.io/projected/6ddab092-5ab2-420d-8d8f-30ce7633185d-kube-api-access-zzb9k\") pod \"dnsmasq-dns-6bb4fc677f-94fbn\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.750845 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08289c5c-740a-4280-85c0-60ed62c01b4f-logs\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.758563 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-config-data\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.759504 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08289c5c-740a-4280-85c0-60ed62c01b4f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.757078 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08289c5c-740a-4280-85c0-60ed62c01b4f-logs\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " 
pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.759641 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08289c5c-740a-4280-85c0-60ed62c01b4f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.760290 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.760457 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltkp9\" (UniqueName: \"kubernetes.io/projected/08289c5c-740a-4280-85c0-60ed62c01b4f-kube-api-access-ltkp9\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.760485 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-scripts\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.760554 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-config-data-custom\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.778045 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-config-data\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.781544 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.784332 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-config-data-custom\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.790269 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltkp9\" (UniqueName: \"kubernetes.io/projected/08289c5c-740a-4280-85c0-60ed62c01b4f-kube-api-access-ltkp9\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.791439 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-scripts\") pod \"cinder-api-0\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " 
pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.851918 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.888920 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.895633 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.942917 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" event={"ID":"cd181b6d-4f45-415c-8038-4bf077b0a747","Type":"ContainerStarted","Data":"740937c04b5fbb92d315885ad3cd80e5924bff43eacec444944592a47992c8ae"} Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.956401 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-78c8db585f-2qfs6" event={"ID":"bda783a8-49d1-48be-9b21-695b1a673b1a","Type":"ContainerStarted","Data":"1d891ce6b0cc2646d09bc39e35563f45ef762ea7352571a370baaddbaffcd73e"} Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.974563 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qwkx\" (UniqueName: \"kubernetes.io/projected/8974254d-64b0-41eb-978a-d2fb9988c8ce-kube-api-access-5qwkx\") pod \"8974254d-64b0-41eb-978a-d2fb9988c8ce\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.975137 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-ovsdbserver-nb\") pod \"8974254d-64b0-41eb-978a-d2fb9988c8ce\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.975288 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-config\") pod \"8974254d-64b0-41eb-978a-d2fb9988c8ce\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.975318 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-dns-swift-storage-0\") pod \"8974254d-64b0-41eb-978a-d2fb9988c8ce\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.975375 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-dns-svc\") pod \"8974254d-64b0-41eb-978a-d2fb9988c8ce\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.975402 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-ovsdbserver-sb\") pod \"8974254d-64b0-41eb-978a-d2fb9988c8ce\" (UID: \"8974254d-64b0-41eb-978a-d2fb9988c8ce\") " Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.979694 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5588c77f49-qmt48" Sep 29 17:10:36 crc 
kubenswrapper[4592]: I0929 17:10:36.996649 4592 generic.go:334] "Generic (PLEG): container finished" podID="8974254d-64b0-41eb-978a-d2fb9988c8ce" containerID="d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30" exitCode=0 Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.996857 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="fa653449-3b0a-461b-8289-3fe84654e411" containerName="glance-log" containerID="cri-o://18c131fe3515379b2daab8552811941330ba63e9131b234abcda01f8ae939415" gracePeriod=30 Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.996975 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.997347 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" event={"ID":"8974254d-64b0-41eb-978a-d2fb9988c8ce","Type":"ContainerDied","Data":"d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30"} Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.997375 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-hjm6g" event={"ID":"8974254d-64b0-41eb-978a-d2fb9988c8ce","Type":"ContainerDied","Data":"3c3f0bd2d9bc5c9cece2165a244d270bcc2fb8377eda3b97d60b49ca631a7a82"} Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.997390 4592 scope.go:117] "RemoveContainer" containerID="d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30" Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.997527 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="3049408b-5861-4e80-b828-aabbd56d77f7" containerName="glance-log" containerID="cri-o://240166c6b8583cdd734609a5f8ed0cebdf3c6bd6f0c18cc37e51b2b73264b840" gracePeriod=30 Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.997599 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="fa653449-3b0a-461b-8289-3fe84654e411" containerName="glance-httpd" containerID="cri-o://9883de3f3b2d98862b2ff02d47096a2aab2045c7f0c77cb1f1c459fd5fa7090f" gracePeriod=30 Sep 29 17:10:36 crc kubenswrapper[4592]: I0929 17:10:36.997839 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="3049408b-5861-4e80-b828-aabbd56d77f7" containerName="glance-httpd" containerID="cri-o://5489585c8e97bf5e78ec6a967bfcd04f4d742cd7ab83a534e5e9c5d8eff1def3" gracePeriod=30 Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.001698 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8974254d-64b0-41eb-978a-d2fb9988c8ce-kube-api-access-5qwkx" (OuterVolumeSpecName: "kube-api-access-5qwkx") pod "8974254d-64b0-41eb-978a-d2fb9988c8ce" (UID: "8974254d-64b0-41eb-978a-d2fb9988c8ce"). InnerVolumeSpecName "kube-api-access-5qwkx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.091299 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qwkx\" (UniqueName: \"kubernetes.io/projected/8974254d-64b0-41eb-978a-d2fb9988c8ce-kube-api-access-5qwkx\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.126568 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6868c5fc6d-w5577"] Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.127854 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6868c5fc6d-w5577" podUID="a4e5f4d8-95ed-4996-8bd4-44029abd998c" containerName="neutron-api" containerID="cri-o://cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e" gracePeriod=30 Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.128484 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-6868c5fc6d-w5577" podUID="a4e5f4d8-95ed-4996-8bd4-44029abd998c" containerName="neutron-httpd" containerID="cri-o://4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640" gracePeriod=30 Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.183336 4592 scope.go:117] "RemoveContainer" containerID="701cc56514e9f66e76d42bb535664dda218be3bf4532114d21e82d5e4d1f0f3b" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.244737 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5b969b6866-rjf85"] Sep 29 17:10:37 crc kubenswrapper[4592]: E0929 17:10:37.245092 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8974254d-64b0-41eb-978a-d2fb9988c8ce" containerName="init" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.245107 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8974254d-64b0-41eb-978a-d2fb9988c8ce" containerName="init" Sep 29 17:10:37 crc kubenswrapper[4592]: E0929 17:10:37.245132 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8974254d-64b0-41eb-978a-d2fb9988c8ce" containerName="dnsmasq-dns" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.245138 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8974254d-64b0-41eb-978a-d2fb9988c8ce" containerName="dnsmasq-dns" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.245350 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8974254d-64b0-41eb-978a-d2fb9988c8ce" containerName="dnsmasq-dns" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.246223 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.250590 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5b969b6866-rjf85"] Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.252229 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.256776 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.324156 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-public-tls-certs\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.324234 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-config-data-custom\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.324258 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-combined-ca-bundle\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.324274 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-config-data\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.324297 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16bb91be-d91d-476e-a81d-44ef92c11718-logs\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.324344 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwq5h\" (UniqueName: \"kubernetes.io/projected/16bb91be-d91d-476e-a81d-44ef92c11718-kube-api-access-hwq5h\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.324370 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-internal-tls-certs\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.344317 4592 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.345394 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-config" (OuterVolumeSpecName: "config") pod "8974254d-64b0-41eb-978a-d2fb9988c8ce" (UID: "8974254d-64b0-41eb-978a-d2fb9988c8ce"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.391703 4592 scope.go:117] "RemoveContainer" containerID="d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30" Sep 29 17:10:37 crc kubenswrapper[4592]: W0929 17:10:37.398525 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d84c3f5_9c0f_473b_a83c_53651dc03ece.slice/crio-128df520a18c14c5bdbc1198195ce6cc5a69557e801bbcbfe05d0cae889ab354 WatchSource:0}: Error finding container 128df520a18c14c5bdbc1198195ce6cc5a69557e801bbcbfe05d0cae889ab354: Status 404 returned error can't find the container with id 128df520a18c14c5bdbc1198195ce6cc5a69557e801bbcbfe05d0cae889ab354 Sep 29 17:10:37 crc kubenswrapper[4592]: E0929 17:10:37.398613 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30\": container with ID starting with d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30 not found: ID does not exist" containerID="d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.398653 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30"} err="failed to get container status \"d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30\": rpc error: code = NotFound desc = could not find container \"d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30\": container with ID starting with d0f8c9df24d6474b9053abe19ca489ad950513e6751c018a67b173340abaac30 not found: ID does not exist" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.398681 4592 scope.go:117] "RemoveContainer" containerID="701cc56514e9f66e76d42bb535664dda218be3bf4532114d21e82d5e4d1f0f3b" Sep 29 17:10:37 crc kubenswrapper[4592]: E0929 17:10:37.421456 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"701cc56514e9f66e76d42bb535664dda218be3bf4532114d21e82d5e4d1f0f3b\": container with ID starting with 701cc56514e9f66e76d42bb535664dda218be3bf4532114d21e82d5e4d1f0f3b not found: ID does not exist" containerID="701cc56514e9f66e76d42bb535664dda218be3bf4532114d21e82d5e4d1f0f3b" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.421502 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"701cc56514e9f66e76d42bb535664dda218be3bf4532114d21e82d5e4d1f0f3b"} err="failed to get container status \"701cc56514e9f66e76d42bb535664dda218be3bf4532114d21e82d5e4d1f0f3b\": rpc error: code = NotFound desc = could not find container \"701cc56514e9f66e76d42bb535664dda218be3bf4532114d21e82d5e4d1f0f3b\": container with ID starting with 701cc56514e9f66e76d42bb535664dda218be3bf4532114d21e82d5e4d1f0f3b not found: ID does not exist" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.425561 4592 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwq5h\" (UniqueName: \"kubernetes.io/projected/16bb91be-d91d-476e-a81d-44ef92c11718-kube-api-access-hwq5h\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.425609 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-internal-tls-certs\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.425683 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-public-tls-certs\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.425750 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-config-data-custom\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.425777 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-combined-ca-bundle\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.425803 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-config-data\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.425833 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16bb91be-d91d-476e-a81d-44ef92c11718-logs\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.425909 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.426324 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16bb91be-d91d-476e-a81d-44ef92c11718-logs\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.458454 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-combined-ca-bundle\") pod 
\"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.459671 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-config-data-custom\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.460956 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-internal-tls-certs\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.484623 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-public-tls-certs\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.485631 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16bb91be-d91d-476e-a81d-44ef92c11718-config-data\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.489278 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwq5h\" (UniqueName: \"kubernetes.io/projected/16bb91be-d91d-476e-a81d-44ef92c11718-kube-api-access-hwq5h\") pod \"barbican-api-5b969b6866-rjf85\" (UID: \"16bb91be-d91d-476e-a81d-44ef92c11718\") " pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.582761 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8974254d-64b0-41eb-978a-d2fb9988c8ce" (UID: "8974254d-64b0-41eb-978a-d2fb9988c8ce"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.624712 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.641940 4592 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.643907 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-94fbn"] Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.685041 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8974254d-64b0-41eb-978a-d2fb9988c8ce" (UID: "8974254d-64b0-41eb-978a-d2fb9988c8ce"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:37 crc kubenswrapper[4592]: W0929 17:10:37.746161 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ddab092_5ab2_420d_8d8f_30ce7633185d.slice/crio-c297e483e0fa50e5069a8e02c70ce98db4453a244f987554faba79eafeafb03c WatchSource:0}: Error finding container c297e483e0fa50e5069a8e02c70ce98db4453a244f987554faba79eafeafb03c: Status 404 returned error can't find the container with id c297e483e0fa50e5069a8e02c70ce98db4453a244f987554faba79eafeafb03c Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.747691 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.791906 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8974254d-64b0-41eb-978a-d2fb9988c8ce" (UID: "8974254d-64b0-41eb-978a-d2fb9988c8ce"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.820667 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8974254d-64b0-41eb-978a-d2fb9988c8ce" (UID: "8974254d-64b0-41eb-978a-d2fb9988c8ce"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.849179 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.849209 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8974254d-64b0-41eb-978a-d2fb9988c8ce-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:37 crc kubenswrapper[4592]: I0929 17:10:37.958960 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.043213 4592 generic.go:334] "Generic (PLEG): container finished" podID="a4e5f4d8-95ed-4996-8bd4-44029abd998c" containerID="4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640" exitCode=0 Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.043311 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6868c5fc6d-w5577" event={"ID":"a4e5f4d8-95ed-4996-8bd4-44029abd998c","Type":"ContainerDied","Data":"4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640"} Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.069406 4592 generic.go:334] "Generic (PLEG): container finished" podID="3049408b-5861-4e80-b828-aabbd56d77f7" containerID="5489585c8e97bf5e78ec6a967bfcd04f4d742cd7ab83a534e5e9c5d8eff1def3" exitCode=0 Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.069441 4592 generic.go:334] "Generic (PLEG): container finished" podID="3049408b-5861-4e80-b828-aabbd56d77f7" containerID="240166c6b8583cdd734609a5f8ed0cebdf3c6bd6f0c18cc37e51b2b73264b840" exitCode=143 Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.069505 4592 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3049408b-5861-4e80-b828-aabbd56d77f7","Type":"ContainerDied","Data":"5489585c8e97bf5e78ec6a967bfcd04f4d742cd7ab83a534e5e9c5d8eff1def3"} Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.069530 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3049408b-5861-4e80-b828-aabbd56d77f7","Type":"ContainerDied","Data":"240166c6b8583cdd734609a5f8ed0cebdf3c6bd6f0c18cc37e51b2b73264b840"} Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.098819 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-78c8db585f-2qfs6" event={"ID":"bda783a8-49d1-48be-9b21-695b1a673b1a","Type":"ContainerStarted","Data":"efd7d7eabb702d80be892eaa7343cf5f2fd7e9ed8a32bb123559cca335ecdc3b"} Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.122568 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" event={"ID":"6ddab092-5ab2-420d-8d8f-30ce7633185d","Type":"ContainerStarted","Data":"c297e483e0fa50e5069a8e02c70ce98db4453a244f987554faba79eafeafb03c"} Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.136479 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0d84c3f5-9c0f-473b-a83c-53651dc03ece","Type":"ContainerStarted","Data":"128df520a18c14c5bdbc1198195ce6cc5a69557e801bbcbfe05d0cae889ab354"} Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.172702 4592 generic.go:334] "Generic (PLEG): container finished" podID="fa653449-3b0a-461b-8289-3fe84654e411" containerID="9883de3f3b2d98862b2ff02d47096a2aab2045c7f0c77cb1f1c459fd5fa7090f" exitCode=0 Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.172730 4592 generic.go:334] "Generic (PLEG): container finished" podID="fa653449-3b0a-461b-8289-3fe84654e411" containerID="18c131fe3515379b2daab8552811941330ba63e9131b234abcda01f8ae939415" exitCode=143 Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.172795 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fa653449-3b0a-461b-8289-3fe84654e411","Type":"ContainerDied","Data":"9883de3f3b2d98862b2ff02d47096a2aab2045c7f0c77cb1f1c459fd5fa7090f"} Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.172820 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fa653449-3b0a-461b-8289-3fe84654e411","Type":"ContainerDied","Data":"18c131fe3515379b2daab8552811941330ba63e9131b234abcda01f8ae939415"} Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.179873 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-hjm6g"] Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.237391 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" event={"ID":"cd181b6d-4f45-415c-8038-4bf077b0a747","Type":"ContainerStarted","Data":"517a48f30ee601b8e8d3bb249bbb0255efb26ca01f95289ea6f83aa00684cad3"} Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.253827 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-hjm6g"] Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.261477 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-78c8db585f-2qfs6" podStartSLOduration=4.7529919320000005 podStartE2EDuration="10.261461252s" 
podCreationTimestamp="2025-09-29 17:10:28 +0000 UTC" firstStartedPulling="2025-09-29 17:10:30.477070673 +0000 UTC m=+1160.624848354" lastFinishedPulling="2025-09-29 17:10:35.985539993 +0000 UTC m=+1166.133317674" observedRunningTime="2025-09-29 17:10:38.141796381 +0000 UTC m=+1168.289574072" watchObservedRunningTime="2025-09-29 17:10:38.261461252 +0000 UTC m=+1168.409238933" Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.309668 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6fdf4f774d-zgzql" podStartSLOduration=4.889884435 podStartE2EDuration="10.309650178s" podCreationTimestamp="2025-09-29 17:10:28 +0000 UTC" firstStartedPulling="2025-09-29 17:10:30.558519557 +0000 UTC m=+1160.706297238" lastFinishedPulling="2025-09-29 17:10:35.97828529 +0000 UTC m=+1166.126062981" observedRunningTime="2025-09-29 17:10:38.300173723 +0000 UTC m=+1168.447951404" watchObservedRunningTime="2025-09-29 17:10:38.309650178 +0000 UTC m=+1168.457427859" Sep 29 17:10:38 crc kubenswrapper[4592]: E0929 17:10:38.449128 4592 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4e5f4d8_95ed_4996_8bd4_44029abd998c.slice/crio-4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda4e5f4d8_95ed_4996_8bd4_44029abd998c.slice/crio-conmon-4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa653449_3b0a_461b_8289_3fe84654e411.slice/crio-conmon-9883de3f3b2d98862b2ff02d47096a2aab2045c7f0c77cb1f1c459fd5fa7090f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3049408b_5861_4e80_b828_aabbd56d77f7.slice/crio-conmon-5489585c8e97bf5e78ec6a967bfcd04f4d742cd7ab83a534e5e9c5d8eff1def3.scope\": RecentStats: unable to find data in memory cache]" Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.627180 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5b969b6866-rjf85"] Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.727769 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.826981 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3049408b-5861-4e80-b828-aabbd56d77f7-logs\") pod \"3049408b-5861-4e80-b828-aabbd56d77f7\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.827014 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"3049408b-5861-4e80-b828-aabbd56d77f7\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.827236 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-config-data\") pod \"3049408b-5861-4e80-b828-aabbd56d77f7\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.827263 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3049408b-5861-4e80-b828-aabbd56d77f7-httpd-run\") pod \"3049408b-5861-4e80-b828-aabbd56d77f7\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.827297 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-scripts\") pod \"3049408b-5861-4e80-b828-aabbd56d77f7\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.827316 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5j6v\" (UniqueName: \"kubernetes.io/projected/3049408b-5861-4e80-b828-aabbd56d77f7-kube-api-access-k5j6v\") pod \"3049408b-5861-4e80-b828-aabbd56d77f7\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.827369 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-combined-ca-bundle\") pod \"3049408b-5861-4e80-b828-aabbd56d77f7\" (UID: \"3049408b-5861-4e80-b828-aabbd56d77f7\") " Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.828328 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3049408b-5861-4e80-b828-aabbd56d77f7-logs" (OuterVolumeSpecName: "logs") pod "3049408b-5861-4e80-b828-aabbd56d77f7" (UID: "3049408b-5861-4e80-b828-aabbd56d77f7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.828517 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3049408b-5861-4e80-b828-aabbd56d77f7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "3049408b-5861-4e80-b828-aabbd56d77f7" (UID: "3049408b-5861-4e80-b828-aabbd56d77f7"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.829121 4592 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3049408b-5861-4e80-b828-aabbd56d77f7-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.829136 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3049408b-5861-4e80-b828-aabbd56d77f7-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.844543 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-scripts" (OuterVolumeSpecName: "scripts") pod "3049408b-5861-4e80-b828-aabbd56d77f7" (UID: "3049408b-5861-4e80-b828-aabbd56d77f7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.849412 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "3049408b-5861-4e80-b828-aabbd56d77f7" (UID: "3049408b-5861-4e80-b828-aabbd56d77f7"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.855376 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3049408b-5861-4e80-b828-aabbd56d77f7-kube-api-access-k5j6v" (OuterVolumeSpecName: "kube-api-access-k5j6v") pod "3049408b-5861-4e80-b828-aabbd56d77f7" (UID: "3049408b-5861-4e80-b828-aabbd56d77f7"). InnerVolumeSpecName "kube-api-access-k5j6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.933364 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.933690 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5j6v\" (UniqueName: \"kubernetes.io/projected/3049408b-5861-4e80-b828-aabbd56d77f7-kube-api-access-k5j6v\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.933720 4592 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Sep 29 17:10:38 crc kubenswrapper[4592]: I0929 17:10:38.989106 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-config-data" (OuterVolumeSpecName: "config-data") pod "3049408b-5861-4e80-b828-aabbd56d77f7" (UID: "3049408b-5861-4e80-b828-aabbd56d77f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.014378 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3049408b-5861-4e80-b828-aabbd56d77f7" (UID: "3049408b-5861-4e80-b828-aabbd56d77f7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.026023 4592 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.035218 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.035256 4592 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.035269 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3049408b-5861-4e80-b828-aabbd56d77f7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.076393 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.203541 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8974254d-64b0-41eb-978a-d2fb9988c8ce" path="/var/lib/kubelet/pods/8974254d-64b0-41eb-978a-d2fb9988c8ce/volumes" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.237389 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-scripts\") pod \"fa653449-3b0a-461b-8289-3fe84654e411\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.237985 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fa653449-3b0a-461b-8289-3fe84654e411-httpd-run\") pod \"fa653449-3b0a-461b-8289-3fe84654e411\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.238016 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwh4m\" (UniqueName: \"kubernetes.io/projected/fa653449-3b0a-461b-8289-3fe84654e411-kube-api-access-rwh4m\") pod \"fa653449-3b0a-461b-8289-3fe84654e411\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.238521 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-config-data\") pod \"fa653449-3b0a-461b-8289-3fe84654e411\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.238252 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa653449-3b0a-461b-8289-3fe84654e411-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "fa653449-3b0a-461b-8289-3fe84654e411" (UID: "fa653449-3b0a-461b-8289-3fe84654e411"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.238637 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa653449-3b0a-461b-8289-3fe84654e411-logs\") pod \"fa653449-3b0a-461b-8289-3fe84654e411\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.238693 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"fa653449-3b0a-461b-8289-3fe84654e411\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.238716 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-combined-ca-bundle\") pod \"fa653449-3b0a-461b-8289-3fe84654e411\" (UID: \"fa653449-3b0a-461b-8289-3fe84654e411\") " Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.239216 4592 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fa653449-3b0a-461b-8289-3fe84654e411-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.239317 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa653449-3b0a-461b-8289-3fe84654e411-logs" (OuterVolumeSpecName: "logs") pod "fa653449-3b0a-461b-8289-3fe84654e411" (UID: "fa653449-3b0a-461b-8289-3fe84654e411"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.243809 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa653449-3b0a-461b-8289-3fe84654e411-kube-api-access-rwh4m" (OuterVolumeSpecName: "kube-api-access-rwh4m") pod "fa653449-3b0a-461b-8289-3fe84654e411" (UID: "fa653449-3b0a-461b-8289-3fe84654e411"). InnerVolumeSpecName "kube-api-access-rwh4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.245175 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "fa653449-3b0a-461b-8289-3fe84654e411" (UID: "fa653449-3b0a-461b-8289-3fe84654e411"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.264384 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-scripts" (OuterVolumeSpecName: "scripts") pod "fa653449-3b0a-461b-8289-3fe84654e411" (UID: "fa653449-3b0a-461b-8289-3fe84654e411"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.290579 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3049408b-5861-4e80-b828-aabbd56d77f7","Type":"ContainerDied","Data":"5c91995a6d43c3cadd547043ee0442ac93b28943a979b0200bd860f99256adb5"} Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.290630 4592 scope.go:117] "RemoveContainer" containerID="5489585c8e97bf5e78ec6a967bfcd04f4d742cd7ab83a534e5e9c5d8eff1def3" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.290750 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.309275 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa653449-3b0a-461b-8289-3fe84654e411" (UID: "fa653449-3b0a-461b-8289-3fe84654e411"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.330817 4592 generic.go:334] "Generic (PLEG): container finished" podID="6ddab092-5ab2-420d-8d8f-30ce7633185d" containerID="ca1704287963a5c83e54fa0e1b419f11f34980053ebd6be3dab83dfef69bded2" exitCode=0 Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.330908 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" event={"ID":"6ddab092-5ab2-420d-8d8f-30ce7633185d","Type":"ContainerDied","Data":"ca1704287963a5c83e54fa0e1b419f11f34980053ebd6be3dab83dfef69bded2"} Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.340640 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa653449-3b0a-461b-8289-3fe84654e411-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.340673 4592 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.340683 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.340693 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.340701 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwh4m\" (UniqueName: \"kubernetes.io/projected/fa653449-3b0a-461b-8289-3fe84654e411-kube-api-access-rwh4m\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.368667 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-config-data" (OuterVolumeSpecName: "config-data") pod "fa653449-3b0a-461b-8289-3fe84654e411" (UID: "fa653449-3b0a-461b-8289-3fe84654e411"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.445248 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa653449-3b0a-461b-8289-3fe84654e411-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.447858 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"fa653449-3b0a-461b-8289-3fe84654e411","Type":"ContainerDied","Data":"07771398b5eebf2af10287c46d9f7936d91160d4d8048f091c66b015078642fa"} Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.447944 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.450420 4592 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.506568 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b969b6866-rjf85" event={"ID":"16bb91be-d91d-476e-a81d-44ef92c11718","Type":"ContainerStarted","Data":"524e39033cd92fc024e2e71c68b29ea145e23c427a1940f622a0aed073a20b04"} Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.506968 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b969b6866-rjf85" event={"ID":"16bb91be-d91d-476e-a81d-44ef92c11718","Type":"ContainerStarted","Data":"18d8f341d710c32b708c019c02086299d8bf1147ec0bf8ed47f399856590e76c"} Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.521581 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"08289c5c-740a-4280-85c0-60ed62c01b4f","Type":"ContainerStarted","Data":"d0de9470b881b96c294eb3e5502b422afc140525247572859aed05cef8b0b1a1"} Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.550649 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.552175 4592 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.557887 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.569102 4592 scope.go:117] "RemoveContainer" containerID="240166c6b8583cdd734609a5f8ed0cebdf3c6bd6f0c18cc37e51b2b73264b840" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.603190 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:10:39 crc kubenswrapper[4592]: E0929 17:10:39.603569 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa653449-3b0a-461b-8289-3fe84654e411" containerName="glance-httpd" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.603588 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa653449-3b0a-461b-8289-3fe84654e411" containerName="glance-httpd" Sep 29 17:10:39 crc kubenswrapper[4592]: E0929 17:10:39.603619 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa653449-3b0a-461b-8289-3fe84654e411" containerName="glance-log" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.603625 4592 
state_mem.go:107] "Deleted CPUSet assignment" podUID="fa653449-3b0a-461b-8289-3fe84654e411" containerName="glance-log" Sep 29 17:10:39 crc kubenswrapper[4592]: E0929 17:10:39.603638 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3049408b-5861-4e80-b828-aabbd56d77f7" containerName="glance-httpd" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.603645 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3049408b-5861-4e80-b828-aabbd56d77f7" containerName="glance-httpd" Sep 29 17:10:39 crc kubenswrapper[4592]: E0929 17:10:39.603662 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3049408b-5861-4e80-b828-aabbd56d77f7" containerName="glance-log" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.603671 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3049408b-5861-4e80-b828-aabbd56d77f7" containerName="glance-log" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.603840 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa653449-3b0a-461b-8289-3fe84654e411" containerName="glance-log" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.603858 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="3049408b-5861-4e80-b828-aabbd56d77f7" containerName="glance-httpd" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.603871 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="3049408b-5861-4e80-b828-aabbd56d77f7" containerName="glance-log" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.603887 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa653449-3b0a-461b-8289-3fe84654e411" containerName="glance-httpd" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.604755 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.616089 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-f6zww" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.616117 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.616267 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.616317 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.631028 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.640563 4592 scope.go:117] "RemoveContainer" containerID="9883de3f3b2d98862b2ff02d47096a2aab2045c7f0c77cb1f1c459fd5fa7090f" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.665355 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.707213 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.748233 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.750209 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.755001 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.755246 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.755256 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.755337 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.755364 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/28597e2b-61b9-4213-9980-deb0f1041e27-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.755435 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28597e2b-61b9-4213-9980-deb0f1041e27-logs\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.755459 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.755504 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-scripts\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.755535 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-config-data\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.755585 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lxmc\" (UniqueName: \"kubernetes.io/projected/28597e2b-61b9-4213-9980-deb0f1041e27-kube-api-access-9lxmc\") pod \"glance-default-external-api-0\" 
(UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.773864 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.796878 4592 scope.go:117] "RemoveContainer" containerID="18c131fe3515379b2daab8552811941330ba63e9131b234abcda01f8ae939415" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.858851 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28597e2b-61b9-4213-9980-deb0f1041e27-logs\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.858919 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.858964 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-scripts\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.858991 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-config-data\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.859036 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lxmc\" (UniqueName: \"kubernetes.io/projected/28597e2b-61b9-4213-9980-deb0f1041e27-kube-api-access-9lxmc\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.859066 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.859110 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.859131 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/28597e2b-61b9-4213-9980-deb0f1041e27-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 
17:10:39.859616 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/28597e2b-61b9-4213-9980-deb0f1041e27-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.859826 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28597e2b-61b9-4213-9980-deb0f1041e27-logs\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.860918 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.901697 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.908715 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-scripts\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.909813 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.910022 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lxmc\" (UniqueName: \"kubernetes.io/projected/28597e2b-61b9-4213-9980-deb0f1041e27-kube-api-access-9lxmc\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.911030 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-config-data\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.962315 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e52b299-d531-4fa5-8871-f77288a223b7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.962628 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.962670 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.962696 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.962752 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzs5r\" (UniqueName: \"kubernetes.io/projected/9e52b299-d531-4fa5-8871-f77288a223b7-kube-api-access-wzs5r\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.962797 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e52b299-d531-4fa5-8871-f77288a223b7-logs\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.962842 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:39 crc kubenswrapper[4592]: I0929 17:10:39.962867 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.001178 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " pod="openstack/glance-default-external-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.064739 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e52b299-d531-4fa5-8871-f77288a223b7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.064804 4592 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.064845 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.064874 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.064926 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzs5r\" (UniqueName: \"kubernetes.io/projected/9e52b299-d531-4fa5-8871-f77288a223b7-kube-api-access-wzs5r\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.064965 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e52b299-d531-4fa5-8871-f77288a223b7-logs\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.065005 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.065032 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.065896 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.074619 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.074669 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/9e52b299-d531-4fa5-8871-f77288a223b7-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.075108 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e52b299-d531-4fa5-8871-f77288a223b7-logs\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.088199 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.090763 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.100385 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzs5r\" (UniqueName: \"kubernetes.io/projected/9e52b299-d531-4fa5-8871-f77288a223b7-kube-api-access-wzs5r\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.100932 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.229238 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.243225 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.392583 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.634317 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0d84c3f5-9c0f-473b-a83c-53651dc03ece","Type":"ContainerStarted","Data":"62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502"} Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.638643 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b969b6866-rjf85" event={"ID":"16bb91be-d91d-476e-a81d-44ef92c11718","Type":"ContainerStarted","Data":"d3928dc758c816ddced13ddb8c277c48078252e02d7d8fd594d60f069ba2c93f"} Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.639190 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.639251 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.660688 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"08289c5c-740a-4280-85c0-60ed62c01b4f","Type":"ContainerStarted","Data":"64eb404d40baa738eac130951ad70c8afedbda4eca21b126099e3007ea770b8c"} Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.675375 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" event={"ID":"6ddab092-5ab2-420d-8d8f-30ce7633185d","Type":"ContainerStarted","Data":"8b717471f9d3e22895367f6382947e0627642aa75f7b3dfec674d3ebf781a549"} Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.676110 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.707187 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5b969b6866-rjf85" podStartSLOduration=3.7071696320000003 podStartE2EDuration="3.707169632s" podCreationTimestamp="2025-09-29 17:10:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:40.703618613 +0000 UTC m=+1170.851396294" watchObservedRunningTime="2025-09-29 17:10:40.707169632 +0000 UTC m=+1170.854947313" Sep 29 17:10:40 crc kubenswrapper[4592]: I0929 17:10:40.741320 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" podStartSLOduration=4.741113091 podStartE2EDuration="4.741113091s" podCreationTimestamp="2025-09-29 17:10:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:40.725180855 +0000 UTC m=+1170.872958546" watchObservedRunningTime="2025-09-29 17:10:40.741113091 +0000 UTC m=+1170.888890782" Sep 29 17:10:41 crc kubenswrapper[4592]: I0929 17:10:41.116363 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-7d4fd79db4-cp892" podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.156:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 17:10:41 crc kubenswrapper[4592]: I0929 17:10:41.158327 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 17:10:41 crc 
kubenswrapper[4592]: I0929 17:10:41.178307 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:10:41 crc kubenswrapper[4592]: I0929 17:10:41.241466 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3049408b-5861-4e80-b828-aabbd56d77f7" path="/var/lib/kubelet/pods/3049408b-5861-4e80-b828-aabbd56d77f7/volumes" Sep 29 17:10:41 crc kubenswrapper[4592]: I0929 17:10:41.244242 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa653449-3b0a-461b-8289-3fe84654e411" path="/var/lib/kubelet/pods/fa653449-3b0a-461b-8289-3fe84654e411/volumes" Sep 29 17:10:41 crc kubenswrapper[4592]: I0929 17:10:41.401689 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:10:41 crc kubenswrapper[4592]: I0929 17:10:41.714373 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"28597e2b-61b9-4213-9980-deb0f1041e27","Type":"ContainerStarted","Data":"c5ce67a9092c8fbf9d5ce3f414b67efeb45f307227b3c13646a20c1c35a476c2"} Sep 29 17:10:41 crc kubenswrapper[4592]: I0929 17:10:41.728482 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9e52b299-d531-4fa5-8871-f77288a223b7","Type":"ContainerStarted","Data":"a5e6e71a863101fd2eb00e2b14e1f3355e4f397c77569736634f7f6b15142c9d"} Sep 29 17:10:42 crc kubenswrapper[4592]: I0929 17:10:42.785438 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0d84c3f5-9c0f-473b-a83c-53651dc03ece","Type":"ContainerStarted","Data":"71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75"} Sep 29 17:10:42 crc kubenswrapper[4592]: I0929 17:10:42.817338 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9e52b299-d531-4fa5-8871-f77288a223b7","Type":"ContainerStarted","Data":"0a28d90387b116cbe9bda7918e78a00a6ec41a55f1104976d4f631f6db97691c"} Sep 29 17:10:42 crc kubenswrapper[4592]: I0929 17:10:42.829203 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"08289c5c-740a-4280-85c0-60ed62c01b4f","Type":"ContainerStarted","Data":"54f98ddefde048f789316f0ac3351b088b7a47b89500ac0446bf95c04b7830a3"} Sep 29 17:10:42 crc kubenswrapper[4592]: I0929 17:10:42.829346 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="08289c5c-740a-4280-85c0-60ed62c01b4f" containerName="cinder-api-log" containerID="cri-o://64eb404d40baa738eac130951ad70c8afedbda4eca21b126099e3007ea770b8c" gracePeriod=30 Sep 29 17:10:42 crc kubenswrapper[4592]: I0929 17:10:42.829572 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 17:10:42 crc kubenswrapper[4592]: I0929 17:10:42.829808 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="08289c5c-740a-4280-85c0-60ed62c01b4f" containerName="cinder-api" containerID="cri-o://54f98ddefde048f789316f0ac3351b088b7a47b89500ac0446bf95c04b7830a3" gracePeriod=30 Sep 29 17:10:42 crc kubenswrapper[4592]: I0929 17:10:42.832263 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.6869321809999995 podStartE2EDuration="6.83224775s" podCreationTimestamp="2025-09-29 17:10:36 +0000 UTC" firstStartedPulling="2025-09-29 
17:10:37.421559961 +0000 UTC m=+1167.569337632" lastFinishedPulling="2025-09-29 17:10:38.56687552 +0000 UTC m=+1168.714653201" observedRunningTime="2025-09-29 17:10:42.823631969 +0000 UTC m=+1172.971409650" watchObservedRunningTime="2025-09-29 17:10:42.83224775 +0000 UTC m=+1172.980025431" Sep 29 17:10:42 crc kubenswrapper[4592]: I0929 17:10:42.834821 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"28597e2b-61b9-4213-9980-deb0f1041e27","Type":"ContainerStarted","Data":"2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30"} Sep 29 17:10:42 crc kubenswrapper[4592]: I0929 17:10:42.891692 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.891674849 podStartE2EDuration="6.891674849s" podCreationTimestamp="2025-09-29 17:10:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:42.885025214 +0000 UTC m=+1173.032802895" watchObservedRunningTime="2025-09-29 17:10:42.891674849 +0000 UTC m=+1173.039452530" Sep 29 17:10:43 crc kubenswrapper[4592]: I0929 17:10:43.843975 4592 generic.go:334] "Generic (PLEG): container finished" podID="08289c5c-740a-4280-85c0-60ed62c01b4f" containerID="64eb404d40baa738eac130951ad70c8afedbda4eca21b126099e3007ea770b8c" exitCode=143 Sep 29 17:10:43 crc kubenswrapper[4592]: I0929 17:10:43.844025 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"08289c5c-740a-4280-85c0-60ed62c01b4f","Type":"ContainerDied","Data":"64eb404d40baa738eac130951ad70c8afedbda4eca21b126099e3007ea770b8c"} Sep 29 17:10:43 crc kubenswrapper[4592]: I0929 17:10:43.846400 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"28597e2b-61b9-4213-9980-deb0f1041e27","Type":"ContainerStarted","Data":"01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88"} Sep 29 17:10:43 crc kubenswrapper[4592]: I0929 17:10:43.848976 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9e52b299-d531-4fa5-8871-f77288a223b7","Type":"ContainerStarted","Data":"7a4b6601de02b94df7230c8b23457160ce1a4cd3fc6d168303e2f92be4b954f5"} Sep 29 17:10:43 crc kubenswrapper[4592]: I0929 17:10:43.892767 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.892744572 podStartE2EDuration="4.892744572s" podCreationTimestamp="2025-09-29 17:10:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:43.869363478 +0000 UTC m=+1174.017141159" watchObservedRunningTime="2025-09-29 17:10:43.892744572 +0000 UTC m=+1174.040522253" Sep 29 17:10:43 crc kubenswrapper[4592]: I0929 17:10:43.893279 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.893275116 podStartE2EDuration="4.893275116s" podCreationTimestamp="2025-09-29 17:10:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:43.885124289 +0000 UTC m=+1174.032901990" watchObservedRunningTime="2025-09-29 17:10:43.893275116 +0000 UTC m=+1174.041052797" Sep 29 17:10:44 crc kubenswrapper[4592]: I0929 17:10:44.096339 
4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:44 crc kubenswrapper[4592]: I0929 17:10:44.857124 4592 generic.go:334] "Generic (PLEG): container finished" podID="08289c5c-740a-4280-85c0-60ed62c01b4f" containerID="54f98ddefde048f789316f0ac3351b088b7a47b89500ac0446bf95c04b7830a3" exitCode=0 Sep 29 17:10:44 crc kubenswrapper[4592]: I0929 17:10:44.857185 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"08289c5c-740a-4280-85c0-60ed62c01b4f","Type":"ContainerDied","Data":"54f98ddefde048f789316f0ac3351b088b7a47b89500ac0446bf95c04b7830a3"} Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.105890 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7d4fd79db4-cp892" podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.156:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.118027 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.423719 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.547777 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-config-data\") pod \"08289c5c-740a-4280-85c0-60ed62c01b4f\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.547842 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08289c5c-740a-4280-85c0-60ed62c01b4f-etc-machine-id\") pod \"08289c5c-740a-4280-85c0-60ed62c01b4f\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.547873 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-combined-ca-bundle\") pod \"08289c5c-740a-4280-85c0-60ed62c01b4f\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.547910 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08289c5c-740a-4280-85c0-60ed62c01b4f-logs\") pod \"08289c5c-740a-4280-85c0-60ed62c01b4f\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.547909 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/08289c5c-740a-4280-85c0-60ed62c01b4f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "08289c5c-740a-4280-85c0-60ed62c01b4f" (UID: "08289c5c-740a-4280-85c0-60ed62c01b4f"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.548286 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08289c5c-740a-4280-85c0-60ed62c01b4f-logs" (OuterVolumeSpecName: "logs") pod "08289c5c-740a-4280-85c0-60ed62c01b4f" (UID: "08289c5c-740a-4280-85c0-60ed62c01b4f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.548317 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-config-data-custom\") pod \"08289c5c-740a-4280-85c0-60ed62c01b4f\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.548595 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-scripts\") pod \"08289c5c-740a-4280-85c0-60ed62c01b4f\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.548902 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltkp9\" (UniqueName: \"kubernetes.io/projected/08289c5c-740a-4280-85c0-60ed62c01b4f-kube-api-access-ltkp9\") pod \"08289c5c-740a-4280-85c0-60ed62c01b4f\" (UID: \"08289c5c-740a-4280-85c0-60ed62c01b4f\") " Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.549624 4592 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08289c5c-740a-4280-85c0-60ed62c01b4f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.549639 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08289c5c-740a-4280-85c0-60ed62c01b4f-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.555195 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08289c5c-740a-4280-85c0-60ed62c01b4f-kube-api-access-ltkp9" (OuterVolumeSpecName: "kube-api-access-ltkp9") pod "08289c5c-740a-4280-85c0-60ed62c01b4f" (UID: "08289c5c-740a-4280-85c0-60ed62c01b4f"). InnerVolumeSpecName "kube-api-access-ltkp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.575540 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "08289c5c-740a-4280-85c0-60ed62c01b4f" (UID: "08289c5c-740a-4280-85c0-60ed62c01b4f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.575620 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-scripts" (OuterVolumeSpecName: "scripts") pod "08289c5c-740a-4280-85c0-60ed62c01b4f" (UID: "08289c5c-740a-4280-85c0-60ed62c01b4f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.616278 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08289c5c-740a-4280-85c0-60ed62c01b4f" (UID: "08289c5c-740a-4280-85c0-60ed62c01b4f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.645237 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-config-data" (OuterVolumeSpecName: "config-data") pod "08289c5c-740a-4280-85c0-60ed62c01b4f" (UID: "08289c5c-740a-4280-85c0-60ed62c01b4f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.651829 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.651967 4592 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.652046 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.652107 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltkp9\" (UniqueName: \"kubernetes.io/projected/08289c5c-740a-4280-85c0-60ed62c01b4f-kube-api-access-ltkp9\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.652181 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08289c5c-740a-4280-85c0-60ed62c01b4f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.872826 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"08289c5c-740a-4280-85c0-60ed62c01b4f","Type":"ContainerDied","Data":"d0de9470b881b96c294eb3e5502b422afc140525247572859aed05cef8b0b1a1"} Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.872866 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.872888 4592 scope.go:117] "RemoveContainer" containerID="54f98ddefde048f789316f0ac3351b088b7a47b89500ac0446bf95c04b7830a3" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.910280 4592 scope.go:117] "RemoveContainer" containerID="64eb404d40baa738eac130951ad70c8afedbda4eca21b126099e3007ea770b8c" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.913687 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.945617 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.986679 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Sep 29 17:10:45 crc kubenswrapper[4592]: E0929 17:10:45.987292 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08289c5c-740a-4280-85c0-60ed62c01b4f" containerName="cinder-api-log" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.987368 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="08289c5c-740a-4280-85c0-60ed62c01b4f" containerName="cinder-api-log" Sep 29 17:10:45 crc kubenswrapper[4592]: E0929 17:10:45.987439 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08289c5c-740a-4280-85c0-60ed62c01b4f" containerName="cinder-api" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.987522 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="08289c5c-740a-4280-85c0-60ed62c01b4f" containerName="cinder-api" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.987809 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="08289c5c-740a-4280-85c0-60ed62c01b4f" containerName="cinder-api-log" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.987877 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="08289c5c-740a-4280-85c0-60ed62c01b4f" containerName="cinder-api" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.989518 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.993341 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.997699 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.997879 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Sep 29 17:10:45 crc kubenswrapper[4592]: I0929 17:10:45.997983 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.164999 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.165372 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.165498 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-config-data\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.165533 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.165601 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.165645 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-logs\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.165749 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-config-data-custom\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.165803 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-scripts\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc 
kubenswrapper[4592]: I0929 17:10:46.165838 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zszcq\" (UniqueName: \"kubernetes.io/projected/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-kube-api-access-zszcq\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.267437 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.267517 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-logs\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.267613 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-config-data-custom\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.267670 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-scripts\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.267711 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zszcq\" (UniqueName: \"kubernetes.io/projected/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-kube-api-access-zszcq\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.267758 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.267804 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.267834 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-config-data\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.267902 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " 
pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.269273 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.269705 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-logs\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.276323 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.281164 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.281289 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-scripts\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.281665 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.283574 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-config-data\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.284179 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-config-data-custom\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.292835 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zszcq\" (UniqueName: \"kubernetes.io/projected/aa1f23ba-8aae-4a33-8946-7cfcd7087e6e-kube-api-access-zszcq\") pod \"cinder-api-0\" (UID: \"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e\") " pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.333337 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.527989 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.862715 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.910501 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.931351 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6664c6795f-th7d5"] Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.931582 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" podUID="e4096f1b-fd5a-4de6-8b4a-019801da00ea" containerName="dnsmasq-dns" containerID="cri-o://21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba" gracePeriod=10 Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.933615 4592 generic.go:334] "Generic (PLEG): container finished" podID="a4e5f4d8-95ed-4996-8bd4-44029abd998c" containerID="cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e" exitCode=0 Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.933646 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6868c5fc6d-w5577" event={"ID":"a4e5f4d8-95ed-4996-8bd4-44029abd998c","Type":"ContainerDied","Data":"cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e"} Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.933665 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6868c5fc6d-w5577" event={"ID":"a4e5f4d8-95ed-4996-8bd4-44029abd998c","Type":"ContainerDied","Data":"42e9948f7b231c4e87fe86a9f5e1ddcd3fc4116c8f9017af6cd15fc70da940ec"} Sep 29 17:10:46 crc kubenswrapper[4592]: I0929 17:10:46.933697 4592 scope.go:117] "RemoveContainer" containerID="4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.013017 4592 scope.go:117] "RemoveContainer" containerID="cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.021565 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-scheduler-0" podUID="0d84c3f5-9c0f-473b-a83c-53651dc03ece" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.046963 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.095655 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-ovndb-tls-certs\") pod \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.095956 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-combined-ca-bundle\") pod \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " Sep 29 17:10:47 crc kubenswrapper[4592]: 
I0929 17:10:47.095988 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c49c2\" (UniqueName: \"kubernetes.io/projected/a4e5f4d8-95ed-4996-8bd4-44029abd998c-kube-api-access-c49c2\") pod \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.096620 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-httpd-config\") pod \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.096648 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-config\") pod \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\" (UID: \"a4e5f4d8-95ed-4996-8bd4-44029abd998c\") " Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.099284 4592 scope.go:117] "RemoveContainer" containerID="4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640" Sep 29 17:10:47 crc kubenswrapper[4592]: E0929 17:10:47.109131 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640\": container with ID starting with 4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640 not found: ID does not exist" containerID="4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.109192 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640"} err="failed to get container status \"4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640\": rpc error: code = NotFound desc = could not find container \"4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640\": container with ID starting with 4aa4c865d8fb8f8e0cee2a4fc8434517763bc6aae84bef2a559e2ddfdea32640 not found: ID does not exist" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.109216 4592 scope.go:117] "RemoveContainer" containerID="cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e" Sep 29 17:10:47 crc kubenswrapper[4592]: E0929 17:10:47.109489 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e\": container with ID starting with cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e not found: ID does not exist" containerID="cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.109537 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e"} err="failed to get container status \"cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e\": rpc error: code = NotFound desc = could not find container \"cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e\": container with ID starting with cfcfc14b448cc00b384c91dc9d1908e95df1bd2606d956617c77e49356b7887e not found: ID does not exist" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.119317 4592 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "a4e5f4d8-95ed-4996-8bd4-44029abd998c" (UID: "a4e5f4d8-95ed-4996-8bd4-44029abd998c"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.138833 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4e5f4d8-95ed-4996-8bd4-44029abd998c-kube-api-access-c49c2" (OuterVolumeSpecName: "kube-api-access-c49c2") pod "a4e5f4d8-95ed-4996-8bd4-44029abd998c" (UID: "a4e5f4d8-95ed-4996-8bd4-44029abd998c"). InnerVolumeSpecName "kube-api-access-c49c2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.188750 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a4e5f4d8-95ed-4996-8bd4-44029abd998c" (UID: "a4e5f4d8-95ed-4996-8bd4-44029abd998c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.200252 4592 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-httpd-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.200293 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.200307 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c49c2\" (UniqueName: \"kubernetes.io/projected/a4e5f4d8-95ed-4996-8bd4-44029abd998c-kube-api-access-c49c2\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.200434 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08289c5c-740a-4280-85c0-60ed62c01b4f" path="/var/lib/kubelet/pods/08289c5c-740a-4280-85c0-60ed62c01b4f/volumes" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.245873 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-config" (OuterVolumeSpecName: "config") pod "a4e5f4d8-95ed-4996-8bd4-44029abd998c" (UID: "a4e5f4d8-95ed-4996-8bd4-44029abd998c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.276727 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "a4e5f4d8-95ed-4996-8bd4-44029abd998c" (UID: "a4e5f4d8-95ed-4996-8bd4-44029abd998c"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.301235 4592 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.301263 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a4e5f4d8-95ed-4996-8bd4-44029abd998c-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.750779 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.819519 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-ovsdbserver-sb\") pod \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.819573 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-dns-swift-storage-0\") pod \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.819684 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-config\") pod \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.819706 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-dns-svc\") pod \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.819778 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhgnw\" (UniqueName: \"kubernetes.io/projected/e4096f1b-fd5a-4de6-8b4a-019801da00ea-kube-api-access-fhgnw\") pod \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.819875 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-ovsdbserver-nb\") pod \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\" (UID: \"e4096f1b-fd5a-4de6-8b4a-019801da00ea\") " Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.848817 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4096f1b-fd5a-4de6-8b4a-019801da00ea-kube-api-access-fhgnw" (OuterVolumeSpecName: "kube-api-access-fhgnw") pod "e4096f1b-fd5a-4de6-8b4a-019801da00ea" (UID: "e4096f1b-fd5a-4de6-8b4a-019801da00ea"). InnerVolumeSpecName "kube-api-access-fhgnw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.928541 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhgnw\" (UniqueName: \"kubernetes.io/projected/e4096f1b-fd5a-4de6-8b4a-019801da00ea-kube-api-access-fhgnw\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:47 crc kubenswrapper[4592]: I0929 17:10:47.974091 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-config" (OuterVolumeSpecName: "config") pod "e4096f1b-fd5a-4de6-8b4a-019801da00ea" (UID: "e4096f1b-fd5a-4de6-8b4a-019801da00ea"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.035400 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.043709 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e4096f1b-fd5a-4de6-8b4a-019801da00ea" (UID: "e4096f1b-fd5a-4de6-8b4a-019801da00ea"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.044017 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e","Type":"ContainerStarted","Data":"12665c110ae1394b10b358984d0abc6a0f4ed300557f68f359c976cf6c9ff387"} Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.076766 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e4096f1b-fd5a-4de6-8b4a-019801da00ea" (UID: "e4096f1b-fd5a-4de6-8b4a-019801da00ea"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.076820 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e4096f1b-fd5a-4de6-8b4a-019801da00ea" (UID: "e4096f1b-fd5a-4de6-8b4a-019801da00ea"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.086300 4592 generic.go:334] "Generic (PLEG): container finished" podID="e4096f1b-fd5a-4de6-8b4a-019801da00ea" containerID="21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba" exitCode=0 Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.086358 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" event={"ID":"e4096f1b-fd5a-4de6-8b4a-019801da00ea","Type":"ContainerDied","Data":"21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba"} Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.086383 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" event={"ID":"e4096f1b-fd5a-4de6-8b4a-019801da00ea","Type":"ContainerDied","Data":"4a4b84e27f2174e168c07f871974e7af625a2e0512b9f38ef7547ac474a898d2"} Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.086400 4592 scope.go:117] "RemoveContainer" containerID="21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.086527 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6664c6795f-th7d5" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.118826 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6868c5fc6d-w5577" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.153486 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.153533 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.153547 4592 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.164409 4592 scope.go:117] "RemoveContainer" containerID="11a2076e079e14665760e16f4e6b44210cc0e1f4cc93f78e583faf096d51a112" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.215050 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e4096f1b-fd5a-4de6-8b4a-019801da00ea" (UID: "e4096f1b-fd5a-4de6-8b4a-019801da00ea"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.220456 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-6868c5fc6d-w5577"] Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.230499 4592 scope.go:117] "RemoveContainer" containerID="21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba" Sep 29 17:10:48 crc kubenswrapper[4592]: E0929 17:10:48.238395 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba\": container with ID starting with 21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba not found: ID does not exist" containerID="21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.238459 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba"} err="failed to get container status \"21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba\": rpc error: code = NotFound desc = could not find container \"21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba\": container with ID starting with 21214cc82eafeff0c00b7f4c24fe4ecffd5b15e3b5ecf2222ce93f9ed98457ba not found: ID does not exist" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.238489 4592 scope.go:117] "RemoveContainer" containerID="11a2076e079e14665760e16f4e6b44210cc0e1f4cc93f78e583faf096d51a112" Sep 29 17:10:48 crc kubenswrapper[4592]: E0929 17:10:48.239110 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11a2076e079e14665760e16f4e6b44210cc0e1f4cc93f78e583faf096d51a112\": container with ID starting with 11a2076e079e14665760e16f4e6b44210cc0e1f4cc93f78e583faf096d51a112 not found: ID does not exist" containerID="11a2076e079e14665760e16f4e6b44210cc0e1f4cc93f78e583faf096d51a112" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.239138 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11a2076e079e14665760e16f4e6b44210cc0e1f4cc93f78e583faf096d51a112"} err="failed to get container status \"11a2076e079e14665760e16f4e6b44210cc0e1f4cc93f78e583faf096d51a112\": rpc error: code = NotFound desc = could not find container \"11a2076e079e14665760e16f4e6b44210cc0e1f4cc93f78e583faf096d51a112\": container with ID starting with 11a2076e079e14665760e16f4e6b44210cc0e1f4cc93f78e583faf096d51a112 not found: ID does not exist" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.247995 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-6868c5fc6d-w5577"] Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.271474 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e4096f1b-fd5a-4de6-8b4a-019801da00ea-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.624704 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6664c6795f-th7d5"] Sep 29 17:10:48 crc kubenswrapper[4592]: I0929 17:10:48.633570 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6664c6795f-th7d5"] Sep 29 17:10:49 crc kubenswrapper[4592]: I0929 17:10:49.140167 4592 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/cinder-api-0" event={"ID":"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e","Type":"ContainerStarted","Data":"7887a9e0b5caf21b810b60e387799bdf7cced7a79587b387b9f810467a15acbf"} Sep 29 17:10:49 crc kubenswrapper[4592]: I0929 17:10:49.203138 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4e5f4d8-95ed-4996-8bd4-44029abd998c" path="/var/lib/kubelet/pods/a4e5f4d8-95ed-4996-8bd4-44029abd998c/volumes" Sep 29 17:10:49 crc kubenswrapper[4592]: I0929 17:10:49.203733 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4096f1b-fd5a-4de6-8b4a-019801da00ea" path="/var/lib/kubelet/pods/e4096f1b-fd5a-4de6-8b4a-019801da00ea/volumes" Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.105814 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.167464 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"aa1f23ba-8aae-4a33-8946-7cfcd7087e6e","Type":"ContainerStarted","Data":"02ad15da801e41296564303f05d5cd7bc9c6eb0be6e70f741bf12b330f314904"} Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.167896 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.194071 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.19404821 podStartE2EDuration="5.19404821s" podCreationTimestamp="2025-09-29 17:10:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:10:50.187279112 +0000 UTC m=+1180.335056793" watchObservedRunningTime="2025-09-29 17:10:50.19404821 +0000 UTC m=+1180.341825891" Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.243766 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.243835 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.295769 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.299915 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.311872 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.312767 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6cbb8cd48-47ckj" Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.395406 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.395456 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:50 crc kubenswrapper[4592]: I0929 17:10:50.433573 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:50 crc 
kubenswrapper[4592]: I0929 17:10:50.527742 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:51 crc kubenswrapper[4592]: I0929 17:10:51.139862 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5b969b6866-rjf85" Sep 29 17:10:51 crc kubenswrapper[4592]: I0929 17:10:51.203608 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:51 crc kubenswrapper[4592]: I0929 17:10:51.219502 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7d4fd79db4-cp892"] Sep 29 17:10:51 crc kubenswrapper[4592]: I0929 17:10:51.219559 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:51 crc kubenswrapper[4592]: I0929 17:10:51.219574 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 17:10:51 crc kubenswrapper[4592]: I0929 17:10:51.219584 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Sep 29 17:10:51 crc kubenswrapper[4592]: I0929 17:10:51.219783 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7d4fd79db4-cp892" podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerName="barbican-api-log" containerID="cri-o://b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c" gracePeriod=30 Sep 29 17:10:51 crc kubenswrapper[4592]: I0929 17:10:51.220229 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7d4fd79db4-cp892" podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerName="barbican-api" containerID="cri-o://94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472" gracePeriod=30 Sep 29 17:10:51 crc kubenswrapper[4592]: I0929 17:10:51.536879 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 17:10:51 crc kubenswrapper[4592]: I0929 17:10:51.588000 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 17:10:51 crc kubenswrapper[4592]: I0929 17:10:51.631871 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-584d46f4c7-tdlrl" Sep 29 17:10:52 crc kubenswrapper[4592]: I0929 17:10:52.194351 4592 generic.go:334] "Generic (PLEG): container finished" podID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerID="b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c" exitCode=143 Sep 29 17:10:52 crc kubenswrapper[4592]: I0929 17:10:52.194465 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d4fd79db4-cp892" event={"ID":"b766500c-aa92-44f5-9a9a-aa581878fc5c","Type":"ContainerDied","Data":"b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c"} Sep 29 17:10:52 crc kubenswrapper[4592]: I0929 17:10:52.194618 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="0d84c3f5-9c0f-473b-a83c-53651dc03ece" containerName="cinder-scheduler" containerID="cri-o://62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502" gracePeriod=30 Sep 29 17:10:52 crc kubenswrapper[4592]: I0929 17:10:52.194660 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" 
podUID="0d84c3f5-9c0f-473b-a83c-53651dc03ece" containerName="probe" containerID="cri-o://71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75" gracePeriod=30 Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.050385 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 29 17:10:53 crc kubenswrapper[4592]: E0929 17:10:53.051040 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4096f1b-fd5a-4de6-8b4a-019801da00ea" containerName="dnsmasq-dns" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.051054 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4096f1b-fd5a-4de6-8b4a-019801da00ea" containerName="dnsmasq-dns" Sep 29 17:10:53 crc kubenswrapper[4592]: E0929 17:10:53.051082 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4e5f4d8-95ed-4996-8bd4-44029abd998c" containerName="neutron-httpd" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.051088 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4e5f4d8-95ed-4996-8bd4-44029abd998c" containerName="neutron-httpd" Sep 29 17:10:53 crc kubenswrapper[4592]: E0929 17:10:53.051098 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4e5f4d8-95ed-4996-8bd4-44029abd998c" containerName="neutron-api" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.051104 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4e5f4d8-95ed-4996-8bd4-44029abd998c" containerName="neutron-api" Sep 29 17:10:53 crc kubenswrapper[4592]: E0929 17:10:53.051117 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4096f1b-fd5a-4de6-8b4a-019801da00ea" containerName="init" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.051123 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4096f1b-fd5a-4de6-8b4a-019801da00ea" containerName="init" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.051304 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4e5f4d8-95ed-4996-8bd4-44029abd998c" containerName="neutron-api" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.051323 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4096f1b-fd5a-4de6-8b4a-019801da00ea" containerName="dnsmasq-dns" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.051338 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4e5f4d8-95ed-4996-8bd4-44029abd998c" containerName="neutron-httpd" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.051870 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.063550 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.064425 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.064953 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.065255 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-4nhfm" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.198210 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/15fb60ef-a28b-4d10-9de6-d53430151724-openstack-config-secret\") pod \"openstackclient\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.198264 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fb60ef-a28b-4d10-9de6-d53430151724-combined-ca-bundle\") pod \"openstackclient\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.198327 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9lxb\" (UniqueName: \"kubernetes.io/projected/15fb60ef-a28b-4d10-9de6-d53430151724-kube-api-access-m9lxb\") pod \"openstackclient\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.198367 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/15fb60ef-a28b-4d10-9de6-d53430151724-openstack-config\") pod \"openstackclient\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.204729 4592 generic.go:334] "Generic (PLEG): container finished" podID="0d84c3f5-9c0f-473b-a83c-53651dc03ece" containerID="71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75" exitCode=0 Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.204810 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.204819 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.204827 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0d84c3f5-9c0f-473b-a83c-53651dc03ece","Type":"ContainerDied","Data":"71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75"} Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.204949 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.204964 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.314097 4592 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fb60ef-a28b-4d10-9de6-d53430151724-combined-ca-bundle\") pod \"openstackclient\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.314137 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/15fb60ef-a28b-4d10-9de6-d53430151724-openstack-config-secret\") pod \"openstackclient\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.314186 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9lxb\" (UniqueName: \"kubernetes.io/projected/15fb60ef-a28b-4d10-9de6-d53430151724-kube-api-access-m9lxb\") pod \"openstackclient\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.314249 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/15fb60ef-a28b-4d10-9de6-d53430151724-openstack-config\") pod \"openstackclient\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.315560 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/15fb60ef-a28b-4d10-9de6-d53430151724-openstack-config\") pod \"openstackclient\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.320304 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fb60ef-a28b-4d10-9de6-d53430151724-combined-ca-bundle\") pod \"openstackclient\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.320609 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/15fb60ef-a28b-4d10-9de6-d53430151724-openstack-config-secret\") pod \"openstackclient\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.329742 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9lxb\" (UniqueName: \"kubernetes.io/projected/15fb60ef-a28b-4d10-9de6-d53430151724-kube-api-access-m9lxb\") pod \"openstackclient\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.394882 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.396270 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.432048 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.498848 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.500223 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.519634 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 17:10:53 crc kubenswrapper[4592]: E0929 17:10:53.617796 4592 log.go:32] "RunPodSandbox from runtime service failed" err=< Sep 29 17:10:53 crc kubenswrapper[4592]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_15fb60ef-a28b-4d10-9de6-d53430151724_0(44be61972656b9445f6f0a8c81973fc0bc3a6968039863f0b588c76feb0f9a57): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"44be61972656b9445f6f0a8c81973fc0bc3a6968039863f0b588c76feb0f9a57" Netns:"/var/run/netns/968825ad-5894-4811-adfe-584956179ded" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=44be61972656b9445f6f0a8c81973fc0bc3a6968039863f0b588c76feb0f9a57;K8S_POD_UID=15fb60ef-a28b-4d10-9de6-d53430151724" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/15fb60ef-a28b-4d10-9de6-d53430151724]: expected pod UID "15fb60ef-a28b-4d10-9de6-d53430151724" but got "d6e91b2c-f8ba-4654-8431-a50545a2c37b" from Kube API Sep 29 17:10:53 crc kubenswrapper[4592]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Sep 29 17:10:53 crc kubenswrapper[4592]: > Sep 29 17:10:53 crc kubenswrapper[4592]: E0929 17:10:53.617898 4592 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Sep 29 17:10:53 crc kubenswrapper[4592]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_15fb60ef-a28b-4d10-9de6-d53430151724_0(44be61972656b9445f6f0a8c81973fc0bc3a6968039863f0b588c76feb0f9a57): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"44be61972656b9445f6f0a8c81973fc0bc3a6968039863f0b588c76feb0f9a57" Netns:"/var/run/netns/968825ad-5894-4811-adfe-584956179ded" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=44be61972656b9445f6f0a8c81973fc0bc3a6968039863f0b588c76feb0f9a57;K8S_POD_UID=15fb60ef-a28b-4d10-9de6-d53430151724" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/15fb60ef-a28b-4d10-9de6-d53430151724]: expected pod UID "15fb60ef-a28b-4d10-9de6-d53430151724" but got 
"d6e91b2c-f8ba-4654-8431-a50545a2c37b" from Kube API Sep 29 17:10:53 crc kubenswrapper[4592]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Sep 29 17:10:53 crc kubenswrapper[4592]: > pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.624836 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d6e91b2c-f8ba-4654-8431-a50545a2c37b-openstack-config-secret\") pod \"openstackclient\" (UID: \"d6e91b2c-f8ba-4654-8431-a50545a2c37b\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.624896 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6e91b2c-f8ba-4654-8431-a50545a2c37b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d6e91b2c-f8ba-4654-8431-a50545a2c37b\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.625019 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4w22\" (UniqueName: \"kubernetes.io/projected/d6e91b2c-f8ba-4654-8431-a50545a2c37b-kube-api-access-t4w22\") pod \"openstackclient\" (UID: \"d6e91b2c-f8ba-4654-8431-a50545a2c37b\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.625115 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d6e91b2c-f8ba-4654-8431-a50545a2c37b-openstack-config\") pod \"openstackclient\" (UID: \"d6e91b2c-f8ba-4654-8431-a50545a2c37b\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.726197 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d6e91b2c-f8ba-4654-8431-a50545a2c37b-openstack-config\") pod \"openstackclient\" (UID: \"d6e91b2c-f8ba-4654-8431-a50545a2c37b\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.726289 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d6e91b2c-f8ba-4654-8431-a50545a2c37b-openstack-config-secret\") pod \"openstackclient\" (UID: \"d6e91b2c-f8ba-4654-8431-a50545a2c37b\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.726323 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6e91b2c-f8ba-4654-8431-a50545a2c37b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d6e91b2c-f8ba-4654-8431-a50545a2c37b\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.726386 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4w22\" (UniqueName: \"kubernetes.io/projected/d6e91b2c-f8ba-4654-8431-a50545a2c37b-kube-api-access-t4w22\") pod \"openstackclient\" (UID: 
\"d6e91b2c-f8ba-4654-8431-a50545a2c37b\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.726968 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d6e91b2c-f8ba-4654-8431-a50545a2c37b-openstack-config\") pod \"openstackclient\" (UID: \"d6e91b2c-f8ba-4654-8431-a50545a2c37b\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.730790 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6e91b2c-f8ba-4654-8431-a50545a2c37b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d6e91b2c-f8ba-4654-8431-a50545a2c37b\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.731089 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d6e91b2c-f8ba-4654-8431-a50545a2c37b-openstack-config-secret\") pod \"openstackclient\" (UID: \"d6e91b2c-f8ba-4654-8431-a50545a2c37b\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.744084 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4w22\" (UniqueName: \"kubernetes.io/projected/d6e91b2c-f8ba-4654-8431-a50545a2c37b-kube-api-access-t4w22\") pod \"openstackclient\" (UID: \"d6e91b2c-f8ba-4654-8431-a50545a2c37b\") " pod="openstack/openstackclient" Sep 29 17:10:53 crc kubenswrapper[4592]: I0929 17:10:53.870559 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.218693 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.222871 4592 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="15fb60ef-a28b-4d10-9de6-d53430151724" podUID="d6e91b2c-f8ba-4654-8431-a50545a2c37b" Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.234680 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.337300 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9lxb\" (UniqueName: \"kubernetes.io/projected/15fb60ef-a28b-4d10-9de6-d53430151724-kube-api-access-m9lxb\") pod \"15fb60ef-a28b-4d10-9de6-d53430151724\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.337358 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/15fb60ef-a28b-4d10-9de6-d53430151724-openstack-config-secret\") pod \"15fb60ef-a28b-4d10-9de6-d53430151724\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.337424 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/15fb60ef-a28b-4d10-9de6-d53430151724-openstack-config\") pod \"15fb60ef-a28b-4d10-9de6-d53430151724\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.337534 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fb60ef-a28b-4d10-9de6-d53430151724-combined-ca-bundle\") pod \"15fb60ef-a28b-4d10-9de6-d53430151724\" (UID: \"15fb60ef-a28b-4d10-9de6-d53430151724\") " Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.341114 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15fb60ef-a28b-4d10-9de6-d53430151724-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "15fb60ef-a28b-4d10-9de6-d53430151724" (UID: "15fb60ef-a28b-4d10-9de6-d53430151724"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.345374 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15fb60ef-a28b-4d10-9de6-d53430151724-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15fb60ef-a28b-4d10-9de6-d53430151724" (UID: "15fb60ef-a28b-4d10-9de6-d53430151724"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.347483 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15fb60ef-a28b-4d10-9de6-d53430151724-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "15fb60ef-a28b-4d10-9de6-d53430151724" (UID: "15fb60ef-a28b-4d10-9de6-d53430151724"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.362364 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15fb60ef-a28b-4d10-9de6-d53430151724-kube-api-access-m9lxb" (OuterVolumeSpecName: "kube-api-access-m9lxb") pod "15fb60ef-a28b-4d10-9de6-d53430151724" (UID: "15fb60ef-a28b-4d10-9de6-d53430151724"). InnerVolumeSpecName "kube-api-access-m9lxb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.404801 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.440003 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15fb60ef-a28b-4d10-9de6-d53430151724-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.440043 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9lxb\" (UniqueName: \"kubernetes.io/projected/15fb60ef-a28b-4d10-9de6-d53430151724-kube-api-access-m9lxb\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.440055 4592 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/15fb60ef-a28b-4d10-9de6-d53430151724-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:54 crc kubenswrapper[4592]: I0929 17:10:54.440064 4592 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/15fb60ef-a28b-4d10-9de6-d53430151724-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.156952 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.195907 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15fb60ef-a28b-4d10-9de6-d53430151724" path="/var/lib/kubelet/pods/15fb60ef-a28b-4d10-9de6-d53430151724/volumes" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.254998 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-config-data-custom\") pod \"b766500c-aa92-44f5-9a9a-aa581878fc5c\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.255602 4592 generic.go:334] "Generic (PLEG): container finished" podID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerID="94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472" exitCode=0 Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.255710 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d4fd79db4-cp892" event={"ID":"b766500c-aa92-44f5-9a9a-aa581878fc5c","Type":"ContainerDied","Data":"94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472"} Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.255710 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7d4fd79db4-cp892" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.255739 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7d4fd79db4-cp892" event={"ID":"b766500c-aa92-44f5-9a9a-aa581878fc5c","Type":"ContainerDied","Data":"fc042eec41e95892258cd9182d0b12c8f76e1b3b040373a7b2226c79ab8590ab"} Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.255758 4592 scope.go:117] "RemoveContainer" containerID="94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.256069 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-combined-ca-bundle\") pod \"b766500c-aa92-44f5-9a9a-aa581878fc5c\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.256392 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-config-data\") pod \"b766500c-aa92-44f5-9a9a-aa581878fc5c\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.256449 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcsdk\" (UniqueName: \"kubernetes.io/projected/b766500c-aa92-44f5-9a9a-aa581878fc5c-kube-api-access-pcsdk\") pod \"b766500c-aa92-44f5-9a9a-aa581878fc5c\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.256537 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b766500c-aa92-44f5-9a9a-aa581878fc5c-logs\") pod \"b766500c-aa92-44f5-9a9a-aa581878fc5c\" (UID: \"b766500c-aa92-44f5-9a9a-aa581878fc5c\") " Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.269370 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.270345 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"d6e91b2c-f8ba-4654-8431-a50545a2c37b","Type":"ContainerStarted","Data":"c8874de9b065fa375f637da42a64d4ef9d67e98b1ef293ac6a8771d5b419713a"} Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.276465 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b766500c-aa92-44f5-9a9a-aa581878fc5c-logs" (OuterVolumeSpecName: "logs") pod "b766500c-aa92-44f5-9a9a-aa581878fc5c" (UID: "b766500c-aa92-44f5-9a9a-aa581878fc5c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.276763 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b766500c-aa92-44f5-9a9a-aa581878fc5c-kube-api-access-pcsdk" (OuterVolumeSpecName: "kube-api-access-pcsdk") pod "b766500c-aa92-44f5-9a9a-aa581878fc5c" (UID: "b766500c-aa92-44f5-9a9a-aa581878fc5c"). InnerVolumeSpecName "kube-api-access-pcsdk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.281798 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b766500c-aa92-44f5-9a9a-aa581878fc5c" (UID: "b766500c-aa92-44f5-9a9a-aa581878fc5c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.306129 4592 scope.go:117] "RemoveContainer" containerID="b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.337009 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b766500c-aa92-44f5-9a9a-aa581878fc5c" (UID: "b766500c-aa92-44f5-9a9a-aa581878fc5c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.344945 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-config-data" (OuterVolumeSpecName: "config-data") pod "b766500c-aa92-44f5-9a9a-aa581878fc5c" (UID: "b766500c-aa92-44f5-9a9a-aa581878fc5c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.358903 4592 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.358946 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.358959 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b766500c-aa92-44f5-9a9a-aa581878fc5c-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.358969 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcsdk\" (UniqueName: \"kubernetes.io/projected/b766500c-aa92-44f5-9a9a-aa581878fc5c-kube-api-access-pcsdk\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.358979 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b766500c-aa92-44f5-9a9a-aa581878fc5c-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.429971 4592 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="15fb60ef-a28b-4d10-9de6-d53430151724" podUID="d6e91b2c-f8ba-4654-8431-a50545a2c37b" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.445853 4592 scope.go:117] "RemoveContainer" containerID="94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472" Sep 29 17:10:55 crc kubenswrapper[4592]: E0929 17:10:55.446303 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472\": container with ID starting with 94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472 not found: ID does not exist" containerID="94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.446337 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472"} err="failed to get container status \"94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472\": rpc error: code = NotFound desc = could not find container \"94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472\": container with ID starting with 94fef0783aeadddd384febfb398d7cbb1d4dab1aca959757fe758ee82c755472 not found: ID does not exist" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.446364 4592 scope.go:117] "RemoveContainer" containerID="b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c" Sep 29 17:10:55 crc kubenswrapper[4592]: E0929 17:10:55.446608 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c\": container with ID starting with b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c not found: ID does not exist" containerID="b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.446633 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c"} err="failed to get container status \"b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c\": rpc error: code = NotFound desc = could not find container \"b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c\": container with ID starting with b3a555105ffbb4a5d8d0a1dbf249107b1efb502c6694507191b3464f703eda5c not found: ID does not exist" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.591363 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7d4fd79db4-cp892"] Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.603186 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7d4fd79db4-cp892"] Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.676710 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.676850 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.679211 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.748521 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 17:10:55 crc kubenswrapper[4592]: I0929 17:10:55.748869 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.282883 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="proxy-httpd" probeResult="failure" 
output="HTTP probe failed with statuscode: 503" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.696826 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.783486 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-config-data-custom\") pod \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.783578 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-combined-ca-bundle\") pod \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.783652 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-config-data\") pod \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.783692 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qr6c\" (UniqueName: \"kubernetes.io/projected/0d84c3f5-9c0f-473b-a83c-53651dc03ece-kube-api-access-2qr6c\") pod \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.783743 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d84c3f5-9c0f-473b-a83c-53651dc03ece-etc-machine-id\") pod \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.783835 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-scripts\") pod \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\" (UID: \"0d84c3f5-9c0f-473b-a83c-53651dc03ece\") " Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.783939 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0d84c3f5-9c0f-473b-a83c-53651dc03ece-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0d84c3f5-9c0f-473b-a83c-53651dc03ece" (UID: "0d84c3f5-9c0f-473b-a83c-53651dc03ece"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.784191 4592 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0d84c3f5-9c0f-473b-a83c-53651dc03ece-etc-machine-id\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.788909 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0d84c3f5-9c0f-473b-a83c-53651dc03ece" (UID: "0d84c3f5-9c0f-473b-a83c-53651dc03ece"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.805673 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d84c3f5-9c0f-473b-a83c-53651dc03ece-kube-api-access-2qr6c" (OuterVolumeSpecName: "kube-api-access-2qr6c") pod "0d84c3f5-9c0f-473b-a83c-53651dc03ece" (UID: "0d84c3f5-9c0f-473b-a83c-53651dc03ece"). InnerVolumeSpecName "kube-api-access-2qr6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.814720 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-scripts" (OuterVolumeSpecName: "scripts") pod "0d84c3f5-9c0f-473b-a83c-53651dc03ece" (UID: "0d84c3f5-9c0f-473b-a83c-53651dc03ece"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.865329 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d84c3f5-9c0f-473b-a83c-53651dc03ece" (UID: "0d84c3f5-9c0f-473b-a83c-53651dc03ece"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.886757 4592 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-config-data-custom\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.886789 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.886800 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qr6c\" (UniqueName: \"kubernetes.io/projected/0d84c3f5-9c0f-473b-a83c-53651dc03ece-kube-api-access-2qr6c\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.886810 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.927946 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-config-data" (OuterVolumeSpecName: "config-data") pod "0d84c3f5-9c0f-473b-a83c-53651dc03ece" (UID: "0d84c3f5-9c0f-473b-a83c-53651dc03ece"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:10:56 crc kubenswrapper[4592]: I0929 17:10:56.988858 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d84c3f5-9c0f-473b-a83c-53651dc03ece-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.208622 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" path="/var/lib/kubelet/pods/b766500c-aa92-44f5-9a9a-aa581878fc5c/volumes" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.301635 4592 generic.go:334] "Generic (PLEG): container finished" podID="0d84c3f5-9c0f-473b-a83c-53651dc03ece" containerID="62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502" exitCode=0 Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.301690 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0d84c3f5-9c0f-473b-a83c-53651dc03ece","Type":"ContainerDied","Data":"62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502"} Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.301722 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0d84c3f5-9c0f-473b-a83c-53651dc03ece","Type":"ContainerDied","Data":"128df520a18c14c5bdbc1198195ce6cc5a69557e801bbcbfe05d0cae889ab354"} Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.301742 4592 scope.go:117] "RemoveContainer" containerID="71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.301887 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.330179 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.338518 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.349845 4592 scope.go:117] "RemoveContainer" containerID="62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.361311 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 17:10:57 crc kubenswrapper[4592]: E0929 17:10:57.365316 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d84c3f5-9c0f-473b-a83c-53651dc03ece" containerName="probe" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.365337 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d84c3f5-9c0f-473b-a83c-53651dc03ece" containerName="probe" Sep 29 17:10:57 crc kubenswrapper[4592]: E0929 17:10:57.365365 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d84c3f5-9c0f-473b-a83c-53651dc03ece" containerName="cinder-scheduler" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.365373 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d84c3f5-9c0f-473b-a83c-53651dc03ece" containerName="cinder-scheduler" Sep 29 17:10:57 crc kubenswrapper[4592]: E0929 17:10:57.365390 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerName="barbican-api-log" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.365397 4592 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerName="barbican-api-log" Sep 29 17:10:57 crc kubenswrapper[4592]: E0929 17:10:57.365432 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerName="barbican-api" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.365439 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerName="barbican-api" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.365646 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerName="barbican-api-log" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.365696 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerName="barbican-api" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.365707 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d84c3f5-9c0f-473b-a83c-53651dc03ece" containerName="cinder-scheduler" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.365729 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d84c3f5-9c0f-473b-a83c-53651dc03ece" containerName="probe" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.366881 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.372522 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.376257 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.432330 4592 scope.go:117] "RemoveContainer" containerID="71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75" Sep 29 17:10:57 crc kubenswrapper[4592]: E0929 17:10:57.432756 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75\": container with ID starting with 71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75 not found: ID does not exist" containerID="71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.432798 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75"} err="failed to get container status \"71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75\": rpc error: code = NotFound desc = could not find container \"71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75\": container with ID starting with 71f9b2b3ff2f311357168f48828e4bc1d382daee59d52b23797951660c400c75 not found: ID does not exist" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.432825 4592 scope.go:117] "RemoveContainer" containerID="62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502" Sep 29 17:10:57 crc kubenswrapper[4592]: E0929 17:10:57.433233 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502\": container with ID starting with 62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502 not found: 
ID does not exist" containerID="62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.433261 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502"} err="failed to get container status \"62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502\": rpc error: code = NotFound desc = could not find container \"62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502\": container with ID starting with 62d5478bddff7196e5a530d05a7eeda9ff7d94f4a59c793d17791c4948b3e502 not found: ID does not exist" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.500615 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-config-data\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.500968 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndx9x\" (UniqueName: \"kubernetes.io/projected/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-kube-api-access-ndx9x\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.501016 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.501088 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.501110 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-scripts\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.501228 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.602952 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndx9x\" (UniqueName: \"kubernetes.io/projected/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-kube-api-access-ndx9x\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.603013 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.603054 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.603078 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-scripts\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.603163 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.603172 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.603325 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-config-data\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.606802 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.608584 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-scripts\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.609547 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-config-data\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.615607 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.626611 4592 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndx9x\" (UniqueName: \"kubernetes.io/projected/8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c-kube-api-access-ndx9x\") pod \"cinder-scheduler-0\" (UID: \"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c\") " pod="openstack/cinder-scheduler-0" Sep 29 17:10:57 crc kubenswrapper[4592]: I0929 17:10:57.716445 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Sep 29 17:10:58 crc kubenswrapper[4592]: I0929 17:10:58.313428 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.198111 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d84c3f5-9c0f-473b-a83c-53651dc03ece" path="/var/lib/kubelet/pods/0d84c3f5-9c0f-473b-a83c-53651dc03ece/volumes" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.426715 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c","Type":"ContainerStarted","Data":"5e7225bcac8dd3c86692079d602ec5a1b67b6e6275ec0c0410c8d4f7a7cfa409"} Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.426754 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c","Type":"ContainerStarted","Data":"86d67f74e4e0b4cb16a56871d2592f129d1539016b616e2146c9d7790b5e30e2"} Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.577847 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-74d4767f8f-mgqs7"] Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.579940 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.582522 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.582934 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.583888 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.634467 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-74d4767f8f-mgqs7"] Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.663504 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1171449-a884-43cb-b254-c2ee282ea3a0-combined-ca-bundle\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.663550 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f1171449-a884-43cb-b254-c2ee282ea3a0-log-httpd\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.663590 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f1171449-a884-43cb-b254-c2ee282ea3a0-config-data\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.663623 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnqhs\" (UniqueName: \"kubernetes.io/projected/f1171449-a884-43cb-b254-c2ee282ea3a0-kube-api-access-tnqhs\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.663703 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f1171449-a884-43cb-b254-c2ee282ea3a0-etc-swift\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.663724 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f1171449-a884-43cb-b254-c2ee282ea3a0-run-httpd\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.663744 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1171449-a884-43cb-b254-c2ee282ea3a0-internal-tls-certs\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.663758 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1171449-a884-43cb-b254-c2ee282ea3a0-public-tls-certs\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.719383 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.768167 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1171449-a884-43cb-b254-c2ee282ea3a0-combined-ca-bundle\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.768228 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f1171449-a884-43cb-b254-c2ee282ea3a0-log-httpd\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.768286 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1171449-a884-43cb-b254-c2ee282ea3a0-config-data\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " 
pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.768333 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnqhs\" (UniqueName: \"kubernetes.io/projected/f1171449-a884-43cb-b254-c2ee282ea3a0-kube-api-access-tnqhs\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.768475 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f1171449-a884-43cb-b254-c2ee282ea3a0-etc-swift\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.768504 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f1171449-a884-43cb-b254-c2ee282ea3a0-run-httpd\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.768528 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1171449-a884-43cb-b254-c2ee282ea3a0-internal-tls-certs\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.768545 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1171449-a884-43cb-b254-c2ee282ea3a0-public-tls-certs\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.779945 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f1171449-a884-43cb-b254-c2ee282ea3a0-run-httpd\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.788211 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f1171449-a884-43cb-b254-c2ee282ea3a0-log-httpd\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.803367 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1171449-a884-43cb-b254-c2ee282ea3a0-public-tls-certs\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.867743 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f1171449-a884-43cb-b254-c2ee282ea3a0-etc-swift\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.873353 4592 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1171449-a884-43cb-b254-c2ee282ea3a0-combined-ca-bundle\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.874847 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnqhs\" (UniqueName: \"kubernetes.io/projected/f1171449-a884-43cb-b254-c2ee282ea3a0-kube-api-access-tnqhs\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.891044 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1171449-a884-43cb-b254-c2ee282ea3a0-config-data\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.903451 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1171449-a884-43cb-b254-c2ee282ea3a0-internal-tls-certs\") pod \"swift-proxy-74d4767f8f-mgqs7\" (UID: \"f1171449-a884-43cb-b254-c2ee282ea3a0\") " pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:10:59 crc kubenswrapper[4592]: I0929 17:10:59.922830 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.072330 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.077118 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7d4fd79db4-cp892" podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.156:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.079975 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7d4fd79db4-cp892" podUID="b766500c-aa92-44f5-9a9a-aa581878fc5c" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.156:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.176130 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-run-httpd\") pod \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.176724 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-sg-core-conf-yaml\") pod \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.176885 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-config-data\") pod \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.176934 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-combined-ca-bundle\") pod \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.177128 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" (UID: "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.177163 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhwzz\" (UniqueName: \"kubernetes.io/projected/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-kube-api-access-jhwzz\") pod \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.177287 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-log-httpd\") pod \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.177378 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-scripts\") pod \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\" (UID: \"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a\") " Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.178205 4592 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.178533 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" (UID: "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.188091 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-scripts" (OuterVolumeSpecName: "scripts") pod "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" (UID: "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.188703 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-kube-api-access-jhwzz" (OuterVolumeSpecName: "kube-api-access-jhwzz") pod "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" (UID: "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a"). InnerVolumeSpecName "kube-api-access-jhwzz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.261650 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" (UID: "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.292093 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" (UID: "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.292118 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhwzz\" (UniqueName: \"kubernetes.io/projected/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-kube-api-access-jhwzz\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.292213 4592 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.292281 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.292293 4592 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.319217 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-config-data" (OuterVolumeSpecName: "config-data") pod "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" (UID: "455d573e-2ab3-4174-9b0d-9deaf6fa0a9a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.394390 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.394420 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.491307 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c","Type":"ContainerStarted","Data":"6fcc052cf05a436b69e4c81957a3ba960ec81483a26557969ef2cba7a409f8b3"} Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.502114 4592 generic.go:334] "Generic (PLEG): container finished" podID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerID="9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8" exitCode=137 Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.502522 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a","Type":"ContainerDied","Data":"9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8"} Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.502560 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"455d573e-2ab3-4174-9b0d-9deaf6fa0a9a","Type":"ContainerDied","Data":"fef8548e5580e18ce75913ba950be5bd9200bb4538b95a3ed658424b5e4788b8"} Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.502580 4592 scope.go:117] "RemoveContainer" containerID="9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.502757 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.573199 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.573178302 podStartE2EDuration="3.573178302s" podCreationTimestamp="2025-09-29 17:10:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:11:00.515568263 +0000 UTC m=+1190.663345934" watchObservedRunningTime="2025-09-29 17:11:00.573178302 +0000 UTC m=+1190.720956003" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.590448 4592 scope.go:117] "RemoveContainer" containerID="034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.602300 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.613138 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.631590 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:11:00 crc kubenswrapper[4592]: E0929 17:11:00.632009 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="ceilometer-notification-agent" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.632028 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="ceilometer-notification-agent" Sep 29 17:11:00 crc kubenswrapper[4592]: E0929 17:11:00.632055 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="sg-core" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.632061 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="sg-core" Sep 29 17:11:00 crc kubenswrapper[4592]: E0929 17:11:00.632079 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="proxy-httpd" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.632086 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="proxy-httpd" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.632278 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="sg-core" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.632302 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="proxy-httpd" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.632318 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" containerName="ceilometer-notification-agent" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.633954 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.638946 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.639382 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.646101 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.693514 4592 scope.go:117] "RemoveContainer" containerID="e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.702859 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.702903 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d689bd9-9bed-4f8f-a232-f91845d55937-run-httpd\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.702928 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-scripts\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.702968 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.702983 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d689bd9-9bed-4f8f-a232-f91845d55937-log-httpd\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.703015 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpr8p\" (UniqueName: \"kubernetes.io/projected/8d689bd9-9bed-4f8f-a232-f91845d55937-kube-api-access-rpr8p\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.703082 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-config-data\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.705975 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-74d4767f8f-mgqs7"] Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.804847 4592 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-config-data\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.804926 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.804972 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d689bd9-9bed-4f8f-a232-f91845d55937-run-httpd\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.804993 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-scripts\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.805048 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.805064 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d689bd9-9bed-4f8f-a232-f91845d55937-log-httpd\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.805100 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpr8p\" (UniqueName: \"kubernetes.io/projected/8d689bd9-9bed-4f8f-a232-f91845d55937-kube-api-access-rpr8p\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.811057 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d689bd9-9bed-4f8f-a232-f91845d55937-run-httpd\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.813220 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d689bd9-9bed-4f8f-a232-f91845d55937-log-httpd\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.813735 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-config-data\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.828707 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.835167 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-scripts\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.845824 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.854236 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpr8p\" (UniqueName: \"kubernetes.io/projected/8d689bd9-9bed-4f8f-a232-f91845d55937-kube-api-access-rpr8p\") pod \"ceilometer-0\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") " pod="openstack/ceilometer-0" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.915969 4592 scope.go:117] "RemoveContainer" containerID="9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8" Sep 29 17:11:00 crc kubenswrapper[4592]: E0929 17:11:00.916815 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8\": container with ID starting with 9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8 not found: ID does not exist" containerID="9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.916857 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8"} err="failed to get container status \"9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8\": rpc error: code = NotFound desc = could not find container \"9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8\": container with ID starting with 9150eb06d02410f8effc271eb94b1eb2ddfaaf6d9e5e3f4d2c99bcab99bcbdc8 not found: ID does not exist" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.916883 4592 scope.go:117] "RemoveContainer" containerID="034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366" Sep 29 17:11:00 crc kubenswrapper[4592]: E0929 17:11:00.917154 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366\": container with ID starting with 034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366 not found: ID does not exist" containerID="034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.917196 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366"} err="failed to get container status \"034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366\": rpc error: code = NotFound desc = could not find container 
\"034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366\": container with ID starting with 034eeb49c09bdea97e330e8c069e95200c43dd9ad619904afedbba16439c6366 not found: ID does not exist" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.917225 4592 scope.go:117] "RemoveContainer" containerID="e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c" Sep 29 17:11:00 crc kubenswrapper[4592]: E0929 17:11:00.917590 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c\": container with ID starting with e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c not found: ID does not exist" containerID="e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.917613 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c"} err="failed to get container status \"e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c\": rpc error: code = NotFound desc = could not find container \"e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c\": container with ID starting with e1e3313a3ac381b2162623c7567e05e18a0fa00b0bcc0c7b4b4cb6efdf81903c not found: ID does not exist" Sep 29 17:11:00 crc kubenswrapper[4592]: I0929 17:11:00.982353 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:11:01 crc kubenswrapper[4592]: I0929 17:11:01.210439 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="455d573e-2ab3-4174-9b0d-9deaf6fa0a9a" path="/var/lib/kubelet/pods/455d573e-2ab3-4174-9b0d-9deaf6fa0a9a/volumes" Sep 29 17:11:01 crc kubenswrapper[4592]: I0929 17:11:01.459498 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:11:01 crc kubenswrapper[4592]: W0929 17:11:01.503411 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d689bd9_9bed_4f8f_a232_f91845d55937.slice/crio-c1beab9ddb1332c3b525b80986967a249f5ce99dfe417130f7a54801509eabb9 WatchSource:0}: Error finding container c1beab9ddb1332c3b525b80986967a249f5ce99dfe417130f7a54801509eabb9: Status 404 returned error can't find the container with id c1beab9ddb1332c3b525b80986967a249f5ce99dfe417130f7a54801509eabb9 Sep 29 17:11:01 crc kubenswrapper[4592]: I0929 17:11:01.541283 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-74d4767f8f-mgqs7" event={"ID":"f1171449-a884-43cb-b254-c2ee282ea3a0","Type":"ContainerStarted","Data":"3f68d55fe36de3226cb6500482a319509479f0aad69878adfc40aa20a297ae5f"} Sep 29 17:11:01 crc kubenswrapper[4592]: I0929 17:11:01.541323 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:11:01 crc kubenswrapper[4592]: I0929 17:11:01.541337 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-74d4767f8f-mgqs7" event={"ID":"f1171449-a884-43cb-b254-c2ee282ea3a0","Type":"ContainerStarted","Data":"b7aec21076dc19c2e649680f0206501212d1910b64ab186d8b49094a347c92d0"} Sep 29 17:11:01 crc kubenswrapper[4592]: I0929 17:11:01.541347 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-74d4767f8f-mgqs7" 
event={"ID":"f1171449-a884-43cb-b254-c2ee282ea3a0","Type":"ContainerStarted","Data":"b4f169aea0ca667123d2da2a1e48ef91a6faa8b688ec73e0a771f8646710905f"} Sep 29 17:11:01 crc kubenswrapper[4592]: I0929 17:11:01.541365 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:11:01 crc kubenswrapper[4592]: I0929 17:11:01.564450 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-74d4767f8f-mgqs7" podStartSLOduration=2.56443024 podStartE2EDuration="2.56443024s" podCreationTimestamp="2025-09-29 17:10:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:11:01.562078574 +0000 UTC m=+1191.709856265" watchObservedRunningTime="2025-09-29 17:11:01.56443024 +0000 UTC m=+1191.712207921" Sep 29 17:11:01 crc kubenswrapper[4592]: I0929 17:11:01.913733 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-g8ghq"] Sep 29 17:11:01 crc kubenswrapper[4592]: I0929 17:11:01.914817 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-g8ghq" Sep 29 17:11:01 crc kubenswrapper[4592]: I0929 17:11:01.933720 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wndkn\" (UniqueName: \"kubernetes.io/projected/ab0d70af-d7f7-4c70-813f-e26252c411a8-kube-api-access-wndkn\") pod \"nova-api-db-create-g8ghq\" (UID: \"ab0d70af-d7f7-4c70-813f-e26252c411a8\") " pod="openstack/nova-api-db-create-g8ghq" Sep 29 17:11:01 crc kubenswrapper[4592]: I0929 17:11:01.933726 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-g8ghq"] Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.013264 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-x2l72"] Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.017370 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-x2l72" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.043254 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-x2l72"] Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.119970 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgkrt\" (UniqueName: \"kubernetes.io/projected/00235364-b815-42c6-80e2-f876dd991541-kube-api-access-kgkrt\") pod \"nova-cell0-db-create-x2l72\" (UID: \"00235364-b815-42c6-80e2-f876dd991541\") " pod="openstack/nova-cell0-db-create-x2l72" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.120576 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wndkn\" (UniqueName: \"kubernetes.io/projected/ab0d70af-d7f7-4c70-813f-e26252c411a8-kube-api-access-wndkn\") pod \"nova-api-db-create-g8ghq\" (UID: \"ab0d70af-d7f7-4c70-813f-e26252c411a8\") " pod="openstack/nova-api-db-create-g8ghq" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.136238 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wndkn\" (UniqueName: \"kubernetes.io/projected/ab0d70af-d7f7-4c70-813f-e26252c411a8-kube-api-access-wndkn\") pod \"nova-api-db-create-g8ghq\" (UID: \"ab0d70af-d7f7-4c70-813f-e26252c411a8\") " pod="openstack/nova-api-db-create-g8ghq" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.224291 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgkrt\" (UniqueName: \"kubernetes.io/projected/00235364-b815-42c6-80e2-f876dd991541-kube-api-access-kgkrt\") pod \"nova-cell0-db-create-x2l72\" (UID: \"00235364-b815-42c6-80e2-f876dd991541\") " pod="openstack/nova-cell0-db-create-x2l72" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.232712 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-4c8jd"] Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.234304 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-4c8jd" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.240507 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-4c8jd"] Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.255813 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgkrt\" (UniqueName: \"kubernetes.io/projected/00235364-b815-42c6-80e2-f876dd991541-kube-api-access-kgkrt\") pod \"nova-cell0-db-create-x2l72\" (UID: \"00235364-b815-42c6-80e2-f876dd991541\") " pod="openstack/nova-cell0-db-create-x2l72" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.256937 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-g8ghq" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.330399 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcmtf\" (UniqueName: \"kubernetes.io/projected/b54f12ab-68c8-47eb-9bc4-93adee895d06-kube-api-access-bcmtf\") pod \"nova-cell1-db-create-4c8jd\" (UID: \"b54f12ab-68c8-47eb-9bc4-93adee895d06\") " pod="openstack/nova-cell1-db-create-4c8jd" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.351802 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-x2l72" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.432114 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcmtf\" (UniqueName: \"kubernetes.io/projected/b54f12ab-68c8-47eb-9bc4-93adee895d06-kube-api-access-bcmtf\") pod \"nova-cell1-db-create-4c8jd\" (UID: \"b54f12ab-68c8-47eb-9bc4-93adee895d06\") " pod="openstack/nova-cell1-db-create-4c8jd" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.466625 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcmtf\" (UniqueName: \"kubernetes.io/projected/b54f12ab-68c8-47eb-9bc4-93adee895d06-kube-api-access-bcmtf\") pod \"nova-cell1-db-create-4c8jd\" (UID: \"b54f12ab-68c8-47eb-9bc4-93adee895d06\") " pod="openstack/nova-cell1-db-create-4c8jd" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.578374 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-4c8jd" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.593574 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d689bd9-9bed-4f8f-a232-f91845d55937","Type":"ContainerStarted","Data":"c1beab9ddb1332c3b525b80986967a249f5ce99dfe417130f7a54801509eabb9"} Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.720380 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Sep 29 17:11:02 crc kubenswrapper[4592]: I0929 17:11:02.861405 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-g8ghq"] Sep 29 17:11:03 crc kubenswrapper[4592]: I0929 17:11:03.102000 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-x2l72"] Sep 29 17:11:03 crc kubenswrapper[4592]: I0929 17:11:03.253384 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-4c8jd"] Sep 29 17:11:03 crc kubenswrapper[4592]: W0929 17:11:03.301390 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb54f12ab_68c8_47eb_9bc4_93adee895d06.slice/crio-d34555d571eb2ac73d4331f335168a4f3790d50b780d5138af6f6c91dc549b58 WatchSource:0}: Error finding container d34555d571eb2ac73d4331f335168a4f3790d50b780d5138af6f6c91dc549b58: Status 404 returned error can't find the container with id d34555d571eb2ac73d4331f335168a4f3790d50b780d5138af6f6c91dc549b58 Sep 29 17:11:03 crc kubenswrapper[4592]: I0929 17:11:03.633699 4592 generic.go:334] "Generic (PLEG): container finished" podID="ab0d70af-d7f7-4c70-813f-e26252c411a8" containerID="b1e1f5a4cd45518c2d74384ca679f452e915a26f2edbfcb9590d368211f303aa" exitCode=0 Sep 29 17:11:03 crc kubenswrapper[4592]: I0929 17:11:03.633980 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-g8ghq" event={"ID":"ab0d70af-d7f7-4c70-813f-e26252c411a8","Type":"ContainerDied","Data":"b1e1f5a4cd45518c2d74384ca679f452e915a26f2edbfcb9590d368211f303aa"} Sep 29 17:11:03 crc kubenswrapper[4592]: I0929 17:11:03.634621 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-g8ghq" event={"ID":"ab0d70af-d7f7-4c70-813f-e26252c411a8","Type":"ContainerStarted","Data":"759dd9b0eaea42d27fb6da8964da9db2f7aeb04aa2d085bea561ce10d9e917ec"} Sep 29 17:11:03 crc kubenswrapper[4592]: I0929 17:11:03.636738 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-db-create-4c8jd" event={"ID":"b54f12ab-68c8-47eb-9bc4-93adee895d06","Type":"ContainerStarted","Data":"d34555d571eb2ac73d4331f335168a4f3790d50b780d5138af6f6c91dc549b58"} Sep 29 17:11:03 crc kubenswrapper[4592]: I0929 17:11:03.639851 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d689bd9-9bed-4f8f-a232-f91845d55937","Type":"ContainerStarted","Data":"2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3"} Sep 29 17:11:03 crc kubenswrapper[4592]: I0929 17:11:03.641865 4592 generic.go:334] "Generic (PLEG): container finished" podID="00235364-b815-42c6-80e2-f876dd991541" containerID="8821f7c62ef22176df069ec61787a80bfbf6ac718ca197b2d84593dd465b7d66" exitCode=0 Sep 29 17:11:03 crc kubenswrapper[4592]: I0929 17:11:03.641904 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-x2l72" event={"ID":"00235364-b815-42c6-80e2-f876dd991541","Type":"ContainerDied","Data":"8821f7c62ef22176df069ec61787a80bfbf6ac718ca197b2d84593dd465b7d66"} Sep 29 17:11:03 crc kubenswrapper[4592]: I0929 17:11:03.641927 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-x2l72" event={"ID":"00235364-b815-42c6-80e2-f876dd991541","Type":"ContainerStarted","Data":"a5f67b2c9d582b02e0b6911a1cebbc23430854d8997e2136d55308b4890a26ba"} Sep 29 17:11:04 crc kubenswrapper[4592]: I0929 17:11:04.508373 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cinder-api-0" podUID="aa1f23ba-8aae-4a33-8946-7cfcd7087e6e" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.166:8776/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 17:11:04 crc kubenswrapper[4592]: I0929 17:11:04.681823 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d689bd9-9bed-4f8f-a232-f91845d55937","Type":"ContainerStarted","Data":"f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f"} Sep 29 17:11:04 crc kubenswrapper[4592]: I0929 17:11:04.693323 4592 generic.go:334] "Generic (PLEG): container finished" podID="b54f12ab-68c8-47eb-9bc4-93adee895d06" containerID="583162e92e8f63d66f73dbfd38668cc7bf9edc849791d912cc186ece00a11d98" exitCode=0 Sep 29 17:11:04 crc kubenswrapper[4592]: I0929 17:11:04.693957 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-4c8jd" event={"ID":"b54f12ab-68c8-47eb-9bc4-93adee895d06","Type":"ContainerDied","Data":"583162e92e8f63d66f73dbfd38668cc7bf9edc849791d912cc186ece00a11d98"} Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.205767 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-x2l72" Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.215537 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-g8ghq" Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.320277 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgkrt\" (UniqueName: \"kubernetes.io/projected/00235364-b815-42c6-80e2-f876dd991541-kube-api-access-kgkrt\") pod \"00235364-b815-42c6-80e2-f876dd991541\" (UID: \"00235364-b815-42c6-80e2-f876dd991541\") " Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.320426 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wndkn\" (UniqueName: \"kubernetes.io/projected/ab0d70af-d7f7-4c70-813f-e26252c411a8-kube-api-access-wndkn\") pod \"ab0d70af-d7f7-4c70-813f-e26252c411a8\" (UID: \"ab0d70af-d7f7-4c70-813f-e26252c411a8\") " Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.328101 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab0d70af-d7f7-4c70-813f-e26252c411a8-kube-api-access-wndkn" (OuterVolumeSpecName: "kube-api-access-wndkn") pod "ab0d70af-d7f7-4c70-813f-e26252c411a8" (UID: "ab0d70af-d7f7-4c70-813f-e26252c411a8"). InnerVolumeSpecName "kube-api-access-wndkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.328748 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00235364-b815-42c6-80e2-f876dd991541-kube-api-access-kgkrt" (OuterVolumeSpecName: "kube-api-access-kgkrt") pod "00235364-b815-42c6-80e2-f876dd991541" (UID: "00235364-b815-42c6-80e2-f876dd991541"). InnerVolumeSpecName "kube-api-access-kgkrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.425295 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgkrt\" (UniqueName: \"kubernetes.io/projected/00235364-b815-42c6-80e2-f876dd991541-kube-api-access-kgkrt\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.425327 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wndkn\" (UniqueName: \"kubernetes.io/projected/ab0d70af-d7f7-4c70-813f-e26252c411a8-kube-api-access-wndkn\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.714650 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d689bd9-9bed-4f8f-a232-f91845d55937","Type":"ContainerStarted","Data":"28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0"} Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.719002 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-x2l72" Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.722991 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-x2l72" event={"ID":"00235364-b815-42c6-80e2-f876dd991541","Type":"ContainerDied","Data":"a5f67b2c9d582b02e0b6911a1cebbc23430854d8997e2136d55308b4890a26ba"} Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.723040 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5f67b2c9d582b02e0b6911a1cebbc23430854d8997e2136d55308b4890a26ba" Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.731648 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-g8ghq" event={"ID":"ab0d70af-d7f7-4c70-813f-e26252c411a8","Type":"ContainerDied","Data":"759dd9b0eaea42d27fb6da8964da9db2f7aeb04aa2d085bea561ce10d9e917ec"} Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.731692 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="759dd9b0eaea42d27fb6da8964da9db2f7aeb04aa2d085bea561ce10d9e917ec" Sep 29 17:11:05 crc kubenswrapper[4592]: I0929 17:11:05.731850 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-g8ghq" Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.156058 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-4c8jd" Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.249077 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcmtf\" (UniqueName: \"kubernetes.io/projected/b54f12ab-68c8-47eb-9bc4-93adee895d06-kube-api-access-bcmtf\") pod \"b54f12ab-68c8-47eb-9bc4-93adee895d06\" (UID: \"b54f12ab-68c8-47eb-9bc4-93adee895d06\") " Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.270446 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b54f12ab-68c8-47eb-9bc4-93adee895d06-kube-api-access-bcmtf" (OuterVolumeSpecName: "kube-api-access-bcmtf") pod "b54f12ab-68c8-47eb-9bc4-93adee895d06" (UID: "b54f12ab-68c8-47eb-9bc4-93adee895d06"). InnerVolumeSpecName "kube-api-access-bcmtf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.343227 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="aa1f23ba-8aae-4a33-8946-7cfcd7087e6e" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.166:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.351875 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcmtf\" (UniqueName: \"kubernetes.io/projected/b54f12ab-68c8-47eb-9bc4-93adee895d06-kube-api-access-bcmtf\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.744177 4592 generic.go:334] "Generic (PLEG): container finished" podID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerID="5cfe4146468dedf86aeb20915c49e1cd273dffa7a3e1db160e17e0a28afe71ab" exitCode=137 Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.744257 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c9567f99b-8nh47" event={"ID":"de56880e-c3e2-46db-b63d-c46acd0f6e1f","Type":"ContainerDied","Data":"5cfe4146468dedf86aeb20915c49e1cd273dffa7a3e1db160e17e0a28afe71ab"} Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.744600 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c9567f99b-8nh47" event={"ID":"de56880e-c3e2-46db-b63d-c46acd0f6e1f","Type":"ContainerStarted","Data":"cda228aa120a90f351fae823ce96a1ef649a815a578fb0163561c02a9e5cf929"} Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.747327 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-4c8jd" event={"ID":"b54f12ab-68c8-47eb-9bc4-93adee895d06","Type":"ContainerDied","Data":"d34555d571eb2ac73d4331f335168a4f3790d50b780d5138af6f6c91dc549b58"} Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.747360 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d34555d571eb2ac73d4331f335168a4f3790d50b780d5138af6f6c91dc549b58" Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.747456 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-4c8jd" Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.755812 4592 generic.go:334] "Generic (PLEG): container finished" podID="2d536771-b1ae-4daf-a9f1-1a86e2af88e8" containerID="5866d8efe686127c7c7b10f8621f579880df1ada3ef7a4bb255d56617124a27d" exitCode=137 Sep 29 17:11:06 crc kubenswrapper[4592]: I0929 17:11:06.755840 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749bb4c784-lnncs" event={"ID":"2d536771-b1ae-4daf-a9f1-1a86e2af88e8","Type":"ContainerDied","Data":"5866d8efe686127c7c7b10f8621f579880df1ada3ef7a4bb255d56617124a27d"} Sep 29 17:11:07 crc kubenswrapper[4592]: I0929 17:11:07.769114 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749bb4c784-lnncs" event={"ID":"2d536771-b1ae-4daf-a9f1-1a86e2af88e8","Type":"ContainerStarted","Data":"a5bd2ab6ff6d21a98c48e8f0a10906fee7808ff984fbf29492d97a299aa60c56"} Sep 29 17:11:07 crc kubenswrapper[4592]: I0929 17:11:07.973471 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Sep 29 17:11:09 crc kubenswrapper[4592]: I0929 17:11:09.928218 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:11:09 crc kubenswrapper[4592]: I0929 17:11:09.931727 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-74d4767f8f-mgqs7" Sep 29 17:11:10 crc kubenswrapper[4592]: I0929 17:11:10.992589 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:11:10 crc kubenswrapper[4592]: I0929 17:11:10.993347 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9e52b299-d531-4fa5-8871-f77288a223b7" containerName="glance-httpd" containerID="cri-o://7a4b6601de02b94df7230c8b23457160ce1a4cd3fc6d168303e2f92be4b954f5" gracePeriod=30 Sep 29 17:11:10 crc kubenswrapper[4592]: I0929 17:11:10.993493 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9e52b299-d531-4fa5-8871-f77288a223b7" containerName="glance-log" containerID="cri-o://0a28d90387b116cbe9bda7918e78a00a6ec41a55f1104976d4f631f6db97691c" gracePeriod=30 Sep 29 17:11:11 crc kubenswrapper[4592]: I0929 17:11:11.003520 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="9e52b299-d531-4fa5-8871-f77288a223b7" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.165:9292/healthcheck\": EOF" Sep 29 17:11:11 crc kubenswrapper[4592]: I0929 17:11:11.003569 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="9e52b299-d531-4fa5-8871-f77288a223b7" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.165:9292/healthcheck\": EOF" Sep 29 17:11:11 crc kubenswrapper[4592]: I0929 17:11:11.003616 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/glance-default-internal-api-0" podUID="9e52b299-d531-4fa5-8871-f77288a223b7" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.165:9292/healthcheck\": EOF" Sep 29 17:11:11 crc kubenswrapper[4592]: I0929 17:11:11.814063 4592 generic.go:334] "Generic (PLEG): container finished" podID="9e52b299-d531-4fa5-8871-f77288a223b7" 
containerID="0a28d90387b116cbe9bda7918e78a00a6ec41a55f1104976d4f631f6db97691c" exitCode=143 Sep 29 17:11:11 crc kubenswrapper[4592]: I0929 17:11:11.814377 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9e52b299-d531-4fa5-8871-f77288a223b7","Type":"ContainerDied","Data":"0a28d90387b116cbe9bda7918e78a00a6ec41a55f1104976d4f631f6db97691c"} Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.145768 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-5abd-account-create-sqj9l"] Sep 29 17:11:12 crc kubenswrapper[4592]: E0929 17:11:12.146110 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00235364-b815-42c6-80e2-f876dd991541" containerName="mariadb-database-create" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.146121 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="00235364-b815-42c6-80e2-f876dd991541" containerName="mariadb-database-create" Sep 29 17:11:12 crc kubenswrapper[4592]: E0929 17:11:12.146130 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab0d70af-d7f7-4c70-813f-e26252c411a8" containerName="mariadb-database-create" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.146136 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab0d70af-d7f7-4c70-813f-e26252c411a8" containerName="mariadb-database-create" Sep 29 17:11:12 crc kubenswrapper[4592]: E0929 17:11:12.146187 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b54f12ab-68c8-47eb-9bc4-93adee895d06" containerName="mariadb-database-create" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.146194 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b54f12ab-68c8-47eb-9bc4-93adee895d06" containerName="mariadb-database-create" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.146353 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab0d70af-d7f7-4c70-813f-e26252c411a8" containerName="mariadb-database-create" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.146362 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="00235364-b815-42c6-80e2-f876dd991541" containerName="mariadb-database-create" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.146374 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b54f12ab-68c8-47eb-9bc4-93adee895d06" containerName="mariadb-database-create" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.146892 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5abd-account-create-sqj9l" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.152033 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.184244 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5abd-account-create-sqj9l"] Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.296572 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6npkh\" (UniqueName: \"kubernetes.io/projected/aec6f146-7664-4f01-ab13-7f4d9bad57f2-kube-api-access-6npkh\") pod \"nova-api-5abd-account-create-sqj9l\" (UID: \"aec6f146-7664-4f01-ab13-7f4d9bad57f2\") " pod="openstack/nova-api-5abd-account-create-sqj9l" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.337466 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-94eb-account-create-wfwp5"] Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.342226 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-94eb-account-create-wfwp5" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.344255 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.355281 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-94eb-account-create-wfwp5"] Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.398448 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6npkh\" (UniqueName: \"kubernetes.io/projected/aec6f146-7664-4f01-ab13-7f4d9bad57f2-kube-api-access-6npkh\") pod \"nova-api-5abd-account-create-sqj9l\" (UID: \"aec6f146-7664-4f01-ab13-7f4d9bad57f2\") " pod="openstack/nova-api-5abd-account-create-sqj9l" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.448633 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6npkh\" (UniqueName: \"kubernetes.io/projected/aec6f146-7664-4f01-ab13-7f4d9bad57f2-kube-api-access-6npkh\") pod \"nova-api-5abd-account-create-sqj9l\" (UID: \"aec6f146-7664-4f01-ab13-7f4d9bad57f2\") " pod="openstack/nova-api-5abd-account-create-sqj9l" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.465740 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5abd-account-create-sqj9l" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.501338 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwwjh\" (UniqueName: \"kubernetes.io/projected/09df60bf-ab19-417c-8910-c666047d0ec9-kube-api-access-rwwjh\") pod \"nova-cell0-94eb-account-create-wfwp5\" (UID: \"09df60bf-ab19-417c-8910-c666047d0ec9\") " pod="openstack/nova-cell0-94eb-account-create-wfwp5" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.556461 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-bee2-account-create-rrx9l"] Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.559444 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-bee2-account-create-rrx9l" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.571550 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.589243 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-bee2-account-create-rrx9l"] Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.612740 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwwjh\" (UniqueName: \"kubernetes.io/projected/09df60bf-ab19-417c-8910-c666047d0ec9-kube-api-access-rwwjh\") pod \"nova-cell0-94eb-account-create-wfwp5\" (UID: \"09df60bf-ab19-417c-8910-c666047d0ec9\") " pod="openstack/nova-cell0-94eb-account-create-wfwp5" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.638126 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwwjh\" (UniqueName: \"kubernetes.io/projected/09df60bf-ab19-417c-8910-c666047d0ec9-kube-api-access-rwwjh\") pod \"nova-cell0-94eb-account-create-wfwp5\" (UID: \"09df60bf-ab19-417c-8910-c666047d0ec9\") " pod="openstack/nova-cell0-94eb-account-create-wfwp5" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.659705 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-94eb-account-create-wfwp5" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.714345 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wn4jj\" (UniqueName: \"kubernetes.io/projected/0167bdf3-7113-4993-b294-d33073462e4d-kube-api-access-wn4jj\") pod \"nova-cell1-bee2-account-create-rrx9l\" (UID: \"0167bdf3-7113-4993-b294-d33073462e4d\") " pod="openstack/nova-cell1-bee2-account-create-rrx9l" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.816500 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wn4jj\" (UniqueName: \"kubernetes.io/projected/0167bdf3-7113-4993-b294-d33073462e4d-kube-api-access-wn4jj\") pod \"nova-cell1-bee2-account-create-rrx9l\" (UID: \"0167bdf3-7113-4993-b294-d33073462e4d\") " pod="openstack/nova-cell1-bee2-account-create-rrx9l" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.841613 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wn4jj\" (UniqueName: \"kubernetes.io/projected/0167bdf3-7113-4993-b294-d33073462e4d-kube-api-access-wn4jj\") pod \"nova-cell1-bee2-account-create-rrx9l\" (UID: \"0167bdf3-7113-4993-b294-d33073462e4d\") " pod="openstack/nova-cell1-bee2-account-create-rrx9l" Sep 29 17:11:12 crc kubenswrapper[4592]: I0929 17:11:12.911436 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-bee2-account-create-rrx9l" Sep 29 17:11:13 crc kubenswrapper[4592]: I0929 17:11:13.535798 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:11:13 crc kubenswrapper[4592]: I0929 17:11:13.986439 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:11:13 crc kubenswrapper[4592]: I0929 17:11:13.986676 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" containerName="glance-log" containerID="cri-o://2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30" gracePeriod=30 Sep 29 17:11:13 crc kubenswrapper[4592]: I0929 17:11:13.986800 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" containerName="glance-httpd" containerID="cri-o://01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88" gracePeriod=30 Sep 29 17:11:13 crc kubenswrapper[4592]: I0929 17:11:13.996262 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.164:9292/healthcheck\": EOF" Sep 29 17:11:13 crc kubenswrapper[4592]: I0929 17:11:13.996338 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/glance-default-external-api-0" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.164:9292/healthcheck\": EOF" Sep 29 17:11:13 crc kubenswrapper[4592]: I0929 17:11:13.996411 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/glance-default-external-api-0" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.164:9292/healthcheck\": EOF" Sep 29 17:11:13 crc kubenswrapper[4592]: I0929 17:11:13.996483 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.164:9292/healthcheck\": EOF" Sep 29 17:11:14 crc kubenswrapper[4592]: I0929 17:11:14.843599 4592 generic.go:334] "Generic (PLEG): container finished" podID="28597e2b-61b9-4213-9980-deb0f1041e27" containerID="2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30" exitCode=143 Sep 29 17:11:14 crc kubenswrapper[4592]: I0929 17:11:14.843778 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"28597e2b-61b9-4213-9980-deb0f1041e27","Type":"ContainerDied","Data":"2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30"} Sep 29 17:11:16 crc kubenswrapper[4592]: I0929 17:11:16.312247 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:11:16 crc kubenswrapper[4592]: I0929 17:11:16.312299 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:11:16 crc kubenswrapper[4592]: I0929 17:11:16.315845 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" 
probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 17:11:16 crc kubenswrapper[4592]: I0929 17:11:16.480933 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:11:16 crc kubenswrapper[4592]: I0929 17:11:16.480968 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:11:16 crc kubenswrapper[4592]: I0929 17:11:16.482971 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-749bb4c784-lnncs" podUID="2d536771-b1ae-4daf-a9f1-1a86e2af88e8" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 29 17:11:16 crc kubenswrapper[4592]: E0929 17:11:16.693966 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified" Sep 29 17:11:16 crc kubenswrapper[4592]: E0929 17:11:16.694463 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstackclient,Image:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,Command:[/bin/sleep],Args:[infinity],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n678h7bh549hbch554h57h565h576h86hc9h5b7h8hb5h66ch77h65ch9ch596h646h9fh554h77h669h64ch54chb4h568h64fhb8h54dh665h5dcq,ValueFrom:nil,},EnvVar{Name:OS_CLOUD,Value:default,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_HOST,Value:metric-storage-prometheus.openstack.svc,ValueFrom:nil,},EnvVar{Name:PROMETHEUS_PORT,Value:9090,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:openstack-config,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/.config/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/home/cloud-admin/cloudrc,SubPath:cloudrc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t4w22,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42401,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:*42401,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start 
failed in pod openstackclient_openstack(d6e91b2c-f8ba-4654-8431-a50545a2c37b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:11:16 crc kubenswrapper[4592]: E0929 17:11:16.695683 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstackclient" podUID="d6e91b2c-f8ba-4654-8431-a50545a2c37b" Sep 29 17:11:16 crc kubenswrapper[4592]: E0929 17:11:16.874783 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"openstackclient\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified\\\"\"" pod="openstack/openstackclient" podUID="d6e91b2c-f8ba-4654-8431-a50545a2c37b" Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.305896 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-94eb-account-create-wfwp5"] Sep 29 17:11:17 crc kubenswrapper[4592]: W0929 17:11:17.308661 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09df60bf_ab19_417c_8910_c666047d0ec9.slice/crio-dbdca50334ac9362d801390b4c758472d456e46f39437f29e4689b9e42447375 WatchSource:0}: Error finding container dbdca50334ac9362d801390b4c758472d456e46f39437f29e4689b9e42447375: Status 404 returned error can't find the container with id dbdca50334ac9362d801390b4c758472d456e46f39437f29e4689b9e42447375 Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.333102 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-5abd-account-create-sqj9l"] Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.427906 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-bee2-account-create-rrx9l"] Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.887659 4592 generic.go:334] "Generic (PLEG): container finished" podID="09df60bf-ab19-417c-8910-c666047d0ec9" containerID="9405561b29682530ed59529f35893507c857d46203a082f0e6cbcc2d1efb8d68" exitCode=0 Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.887951 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-94eb-account-create-wfwp5" event={"ID":"09df60bf-ab19-417c-8910-c666047d0ec9","Type":"ContainerDied","Data":"9405561b29682530ed59529f35893507c857d46203a082f0e6cbcc2d1efb8d68"} Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.887976 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-94eb-account-create-wfwp5" event={"ID":"09df60bf-ab19-417c-8910-c666047d0ec9","Type":"ContainerStarted","Data":"dbdca50334ac9362d801390b4c758472d456e46f39437f29e4689b9e42447375"} Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.928365 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d689bd9-9bed-4f8f-a232-f91845d55937","Type":"ContainerStarted","Data":"4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2"} Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.928451 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="ceilometer-central-agent" containerID="cri-o://2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3" gracePeriod=30 Sep 29 17:11:17 crc 
kubenswrapper[4592]: I0929 17:11:17.928587 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="proxy-httpd" containerID="cri-o://4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2" gracePeriod=30 Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.928620 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="sg-core" containerID="cri-o://28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0" gracePeriod=30 Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.928647 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="ceilometer-notification-agent" containerID="cri-o://f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f" gracePeriod=30 Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.928946 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.939656 4592 generic.go:334] "Generic (PLEG): container finished" podID="aec6f146-7664-4f01-ab13-7f4d9bad57f2" containerID="924a1646f5369fa836964d520b6dc57969883c479ce67698c3bb777ade64117a" exitCode=0 Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.939732 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5abd-account-create-sqj9l" event={"ID":"aec6f146-7664-4f01-ab13-7f4d9bad57f2","Type":"ContainerDied","Data":"924a1646f5369fa836964d520b6dc57969883c479ce67698c3bb777ade64117a"} Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.939765 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5abd-account-create-sqj9l" event={"ID":"aec6f146-7664-4f01-ab13-7f4d9bad57f2","Type":"ContainerStarted","Data":"43041d031edacd246c3ca1089750806c072843d6efa78ed3fb2168ca66356909"} Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.943274 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bee2-account-create-rrx9l" event={"ID":"0167bdf3-7113-4993-b294-d33073462e4d","Type":"ContainerStarted","Data":"89c7169e8b7914ce46fc326a44ce5da5fb42cc201036a0aefa6d8b12040b0db6"} Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.946497 4592 generic.go:334] "Generic (PLEG): container finished" podID="9e52b299-d531-4fa5-8871-f77288a223b7" containerID="7a4b6601de02b94df7230c8b23457160ce1a4cd3fc6d168303e2f92be4b954f5" exitCode=0 Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.946538 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9e52b299-d531-4fa5-8871-f77288a223b7","Type":"ContainerDied","Data":"7a4b6601de02b94df7230c8b23457160ce1a4cd3fc6d168303e2f92be4b954f5"} Sep 29 17:11:17 crc kubenswrapper[4592]: I0929 17:11:17.970780 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.696475649 podStartE2EDuration="17.970763184s" podCreationTimestamp="2025-09-29 17:11:00 +0000 UTC" firstStartedPulling="2025-09-29 17:11:01.533243019 +0000 UTC m=+1191.681020700" lastFinishedPulling="2025-09-29 17:11:16.807530554 +0000 UTC m=+1206.955308235" observedRunningTime="2025-09-29 17:11:17.961680611 +0000 UTC m=+1208.109458292" watchObservedRunningTime="2025-09-29 17:11:17.970763184 
+0000 UTC m=+1208.118540865" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.133379 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.242502 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-combined-ca-bundle\") pod \"9e52b299-d531-4fa5-8871-f77288a223b7\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.242558 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-scripts\") pod \"9e52b299-d531-4fa5-8871-f77288a223b7\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.244571 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e52b299-d531-4fa5-8871-f77288a223b7-httpd-run\") pod \"9e52b299-d531-4fa5-8871-f77288a223b7\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.244632 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-config-data\") pod \"9e52b299-d531-4fa5-8871-f77288a223b7\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.244939 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzs5r\" (UniqueName: \"kubernetes.io/projected/9e52b299-d531-4fa5-8871-f77288a223b7-kube-api-access-wzs5r\") pod \"9e52b299-d531-4fa5-8871-f77288a223b7\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.244986 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"9e52b299-d531-4fa5-8871-f77288a223b7\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.245026 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-internal-tls-certs\") pod \"9e52b299-d531-4fa5-8871-f77288a223b7\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.245115 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e52b299-d531-4fa5-8871-f77288a223b7-logs\") pod \"9e52b299-d531-4fa5-8871-f77288a223b7\" (UID: \"9e52b299-d531-4fa5-8871-f77288a223b7\") " Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.246914 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e52b299-d531-4fa5-8871-f77288a223b7-logs" (OuterVolumeSpecName: "logs") pod "9e52b299-d531-4fa5-8871-f77288a223b7" (UID: "9e52b299-d531-4fa5-8871-f77288a223b7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.247521 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e52b299-d531-4fa5-8871-f77288a223b7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9e52b299-d531-4fa5-8871-f77288a223b7" (UID: "9e52b299-d531-4fa5-8871-f77288a223b7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.254441 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-scripts" (OuterVolumeSpecName: "scripts") pod "9e52b299-d531-4fa5-8871-f77288a223b7" (UID: "9e52b299-d531-4fa5-8871-f77288a223b7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.295838 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "9e52b299-d531-4fa5-8871-f77288a223b7" (UID: "9e52b299-d531-4fa5-8871-f77288a223b7"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.301123 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e52b299-d531-4fa5-8871-f77288a223b7-kube-api-access-wzs5r" (OuterVolumeSpecName: "kube-api-access-wzs5r") pod "9e52b299-d531-4fa5-8871-f77288a223b7" (UID: "9e52b299-d531-4fa5-8871-f77288a223b7"). InnerVolumeSpecName "kube-api-access-wzs5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.347574 4592 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.349313 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e52b299-d531-4fa5-8871-f77288a223b7-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.349343 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.349353 4592 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9e52b299-d531-4fa5-8871-f77288a223b7-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.349364 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzs5r\" (UniqueName: \"kubernetes.io/projected/9e52b299-d531-4fa5-8871-f77288a223b7-kube-api-access-wzs5r\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.389084 4592 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.413305 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-combined-ca-bundle" (OuterVolumeSpecName: 
"combined-ca-bundle") pod "9e52b299-d531-4fa5-8871-f77288a223b7" (UID: "9e52b299-d531-4fa5-8871-f77288a223b7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.426938 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-config-data" (OuterVolumeSpecName: "config-data") pod "9e52b299-d531-4fa5-8871-f77288a223b7" (UID: "9e52b299-d531-4fa5-8871-f77288a223b7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.427739 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9e52b299-d531-4fa5-8871-f77288a223b7" (UID: "9e52b299-d531-4fa5-8871-f77288a223b7"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.450950 4592 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.450989 4592 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.451036 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.451045 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e52b299-d531-4fa5-8871-f77288a223b7-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.915497 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.970413 4592 generic.go:334] "Generic (PLEG): container finished" podID="28597e2b-61b9-4213-9980-deb0f1041e27" containerID="01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88" exitCode=0 Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.970709 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.970757 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"28597e2b-61b9-4213-9980-deb0f1041e27","Type":"ContainerDied","Data":"01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88"} Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.970913 4592 scope.go:117] "RemoveContainer" containerID="01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88" Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.971566 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"28597e2b-61b9-4213-9980-deb0f1041e27","Type":"ContainerDied","Data":"c5ce67a9092c8fbf9d5ce3f414b67efeb45f307227b3c13646a20c1c35a476c2"} Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.975844 4592 generic.go:334] "Generic (PLEG): container finished" podID="0167bdf3-7113-4993-b294-d33073462e4d" containerID="43f4f54b807fc9b253229dfc0397bb3a0f82d65b1362e4ca1de24653bc81f8cc" exitCode=0 Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.975941 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bee2-account-create-rrx9l" event={"ID":"0167bdf3-7113-4993-b294-d33073462e4d","Type":"ContainerDied","Data":"43f4f54b807fc9b253229dfc0397bb3a0f82d65b1362e4ca1de24653bc81f8cc"} Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.988605 4592 generic.go:334] "Generic (PLEG): container finished" podID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerID="4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2" exitCode=0 Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.988645 4592 generic.go:334] "Generic (PLEG): container finished" podID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerID="28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0" exitCode=2 Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.988657 4592 generic.go:334] "Generic (PLEG): container finished" podID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerID="2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3" exitCode=0 Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.988746 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d689bd9-9bed-4f8f-a232-f91845d55937","Type":"ContainerDied","Data":"4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2"} Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.988799 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d689bd9-9bed-4f8f-a232-f91845d55937","Type":"ContainerDied","Data":"28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0"} Sep 29 17:11:18 crc kubenswrapper[4592]: I0929 17:11:18.988813 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d689bd9-9bed-4f8f-a232-f91845d55937","Type":"ContainerDied","Data":"2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3"} Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.006335 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9e52b299-d531-4fa5-8871-f77288a223b7","Type":"ContainerDied","Data":"a5e6e71a863101fd2eb00e2b14e1f3355e4f397c77569736634f7f6b15142c9d"} Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.006446 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.030821 4592 scope.go:117] "RemoveContainer" containerID="2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.061607 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28597e2b-61b9-4213-9980-deb0f1041e27-logs\") pod \"28597e2b-61b9-4213-9980-deb0f1041e27\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.061758 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-public-tls-certs\") pod \"28597e2b-61b9-4213-9980-deb0f1041e27\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.061892 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"28597e2b-61b9-4213-9980-deb0f1041e27\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.061978 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-config-data\") pod \"28597e2b-61b9-4213-9980-deb0f1041e27\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.062131 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-combined-ca-bundle\") pod \"28597e2b-61b9-4213-9980-deb0f1041e27\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.062189 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lxmc\" (UniqueName: \"kubernetes.io/projected/28597e2b-61b9-4213-9980-deb0f1041e27-kube-api-access-9lxmc\") pod \"28597e2b-61b9-4213-9980-deb0f1041e27\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.062295 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/28597e2b-61b9-4213-9980-deb0f1041e27-httpd-run\") pod \"28597e2b-61b9-4213-9980-deb0f1041e27\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.062358 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-scripts\") pod \"28597e2b-61b9-4213-9980-deb0f1041e27\" (UID: \"28597e2b-61b9-4213-9980-deb0f1041e27\") " Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.063025 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28597e2b-61b9-4213-9980-deb0f1041e27-logs" (OuterVolumeSpecName: "logs") pod "28597e2b-61b9-4213-9980-deb0f1041e27" (UID: "28597e2b-61b9-4213-9980-deb0f1041e27"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.064122 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28597e2b-61b9-4213-9980-deb0f1041e27-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.066672 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28597e2b-61b9-4213-9980-deb0f1041e27-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "28597e2b-61b9-4213-9980-deb0f1041e27" (UID: "28597e2b-61b9-4213-9980-deb0f1041e27"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.074886 4592 scope.go:117] "RemoveContainer" containerID="01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88" Sep 29 17:11:19 crc kubenswrapper[4592]: E0929 17:11:19.079761 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88\": container with ID starting with 01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88 not found: ID does not exist" containerID="01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.079803 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88"} err="failed to get container status \"01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88\": rpc error: code = NotFound desc = could not find container \"01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88\": container with ID starting with 01ee4f45b84bc54cce5677688eaa039c683cec0cf9a8782cf0cbb6038eb6be88 not found: ID does not exist" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.079831 4592 scope.go:117] "RemoveContainer" containerID="2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30" Sep 29 17:11:19 crc kubenswrapper[4592]: E0929 17:11:19.080453 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30\": container with ID starting with 2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30 not found: ID does not exist" containerID="2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.080480 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30"} err="failed to get container status \"2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30\": rpc error: code = NotFound desc = could not find container \"2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30\": container with ID starting with 2684d22dfaba0e26abacc98a21ae9d6e49089456e63dd4da12f143df9af55d30 not found: ID does not exist" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.080501 4592 scope.go:117] "RemoveContainer" containerID="7a4b6601de02b94df7230c8b23457160ce1a4cd3fc6d168303e2f92be4b954f5" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.086119 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 
29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.091962 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28597e2b-61b9-4213-9980-deb0f1041e27-kube-api-access-9lxmc" (OuterVolumeSpecName: "kube-api-access-9lxmc") pod "28597e2b-61b9-4213-9980-deb0f1041e27" (UID: "28597e2b-61b9-4213-9980-deb0f1041e27"). InnerVolumeSpecName "kube-api-access-9lxmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.093041 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-scripts" (OuterVolumeSpecName: "scripts") pod "28597e2b-61b9-4213-9980-deb0f1041e27" (UID: "28597e2b-61b9-4213-9980-deb0f1041e27"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.093267 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "28597e2b-61b9-4213-9980-deb0f1041e27" (UID: "28597e2b-61b9-4213-9980-deb0f1041e27"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.102923 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.161399 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28597e2b-61b9-4213-9980-deb0f1041e27" (UID: "28597e2b-61b9-4213-9980-deb0f1041e27"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.175469 4592 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/28597e2b-61b9-4213-9980-deb0f1041e27-httpd-run\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.175502 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.175527 4592 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.175538 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.175558 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lxmc\" (UniqueName: \"kubernetes.io/projected/28597e2b-61b9-4213-9980-deb0f1041e27-kube-api-access-9lxmc\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.224438 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e52b299-d531-4fa5-8871-f77288a223b7" path="/var/lib/kubelet/pods/9e52b299-d531-4fa5-8871-f77288a223b7/volumes" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.231247 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:11:19 crc kubenswrapper[4592]: E0929 17:11:19.231599 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e52b299-d531-4fa5-8871-f77288a223b7" containerName="glance-log" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.231610 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e52b299-d531-4fa5-8871-f77288a223b7" containerName="glance-log" Sep 29 17:11:19 crc kubenswrapper[4592]: E0929 17:11:19.231622 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" containerName="glance-httpd" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.231628 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" containerName="glance-httpd" Sep 29 17:11:19 crc kubenswrapper[4592]: E0929 17:11:19.231647 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e52b299-d531-4fa5-8871-f77288a223b7" containerName="glance-httpd" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.231653 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e52b299-d531-4fa5-8871-f77288a223b7" containerName="glance-httpd" Sep 29 17:11:19 crc kubenswrapper[4592]: E0929 17:11:19.231670 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" containerName="glance-log" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.231677 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" containerName="glance-log" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.231865 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" containerName="glance-httpd" Sep 29 17:11:19 
crc kubenswrapper[4592]: I0929 17:11:19.231883 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e52b299-d531-4fa5-8871-f77288a223b7" containerName="glance-httpd" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.231896 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" containerName="glance-log" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.231907 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e52b299-d531-4fa5-8871-f77288a223b7" containerName="glance-log" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.233574 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.237119 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.244947 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.245519 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.296346 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "28597e2b-61b9-4213-9980-deb0f1041e27" (UID: "28597e2b-61b9-4213-9980-deb0f1041e27"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.296489 4592 scope.go:117] "RemoveContainer" containerID="0a28d90387b116cbe9bda7918e78a00a6ec41a55f1104976d4f631f6db97691c" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.308331 4592 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.317845 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-config-data" (OuterVolumeSpecName: "config-data") pod "28597e2b-61b9-4213-9980-deb0f1041e27" (UID: "28597e2b-61b9-4213-9980-deb0f1041e27"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.378932 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e42af79d-fc77-4451-8550-cbd866e1eabe-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.379042 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bxjb\" (UniqueName: \"kubernetes.io/projected/e42af79d-fc77-4451-8550-cbd866e1eabe-kube-api-access-8bxjb\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.379074 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42af79d-fc77-4451-8550-cbd866e1eabe-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.379125 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e42af79d-fc77-4451-8550-cbd866e1eabe-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.379260 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e42af79d-fc77-4451-8550-cbd866e1eabe-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.379303 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e42af79d-fc77-4451-8550-cbd866e1eabe-logs\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.379336 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e42af79d-fc77-4451-8550-cbd866e1eabe-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.379386 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.379434 4592 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-public-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 
17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.379460 4592 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.379470 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28597e2b-61b9-4213-9980-deb0f1041e27-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.481289 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e42af79d-fc77-4451-8550-cbd866e1eabe-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.481988 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.482034 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e42af79d-fc77-4451-8550-cbd866e1eabe-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.482185 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bxjb\" (UniqueName: \"kubernetes.io/projected/e42af79d-fc77-4451-8550-cbd866e1eabe-kube-api-access-8bxjb\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.482251 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.483696 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42af79d-fc77-4451-8550-cbd866e1eabe-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.483818 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e42af79d-fc77-4451-8550-cbd866e1eabe-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.483956 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e42af79d-fc77-4451-8550-cbd866e1eabe-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") 
" pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.484007 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e42af79d-fc77-4451-8550-cbd866e1eabe-logs\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.484534 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e42af79d-fc77-4451-8550-cbd866e1eabe-logs\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.491345 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42af79d-fc77-4451-8550-cbd866e1eabe-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.491729 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e42af79d-fc77-4451-8550-cbd866e1eabe-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.491770 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e42af79d-fc77-4451-8550-cbd866e1eabe-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.497490 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e42af79d-fc77-4451-8550-cbd866e1eabe-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.505322 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e42af79d-fc77-4451-8550-cbd866e1eabe-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.507924 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bxjb\" (UniqueName: \"kubernetes.io/projected/e42af79d-fc77-4451-8550-cbd866e1eabe-kube-api-access-8bxjb\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.529473 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-5abd-account-create-sqj9l" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.543322 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"e42af79d-fc77-4451-8550-cbd866e1eabe\") " pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.611429 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.661831 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.687160 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-94eb-account-create-wfwp5" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.688074 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6npkh\" (UniqueName: \"kubernetes.io/projected/aec6f146-7664-4f01-ab13-7f4d9bad57f2-kube-api-access-6npkh\") pod \"aec6f146-7664-4f01-ab13-7f4d9bad57f2\" (UID: \"aec6f146-7664-4f01-ab13-7f4d9bad57f2\") " Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.722066 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.731658 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aec6f146-7664-4f01-ab13-7f4d9bad57f2-kube-api-access-6npkh" (OuterVolumeSpecName: "kube-api-access-6npkh") pod "aec6f146-7664-4f01-ab13-7f4d9bad57f2" (UID: "aec6f146-7664-4f01-ab13-7f4d9bad57f2"). InnerVolumeSpecName "kube-api-access-6npkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.750648 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:11:19 crc kubenswrapper[4592]: E0929 17:11:19.751050 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aec6f146-7664-4f01-ab13-7f4d9bad57f2" containerName="mariadb-account-create" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.751069 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="aec6f146-7664-4f01-ab13-7f4d9bad57f2" containerName="mariadb-account-create" Sep 29 17:11:19 crc kubenswrapper[4592]: E0929 17:11:19.751084 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09df60bf-ab19-417c-8910-c666047d0ec9" containerName="mariadb-account-create" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.751092 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="09df60bf-ab19-417c-8910-c666047d0ec9" containerName="mariadb-account-create" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.753658 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="aec6f146-7664-4f01-ab13-7f4d9bad57f2" containerName="mariadb-account-create" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.753690 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="09df60bf-ab19-417c-8910-c666047d0ec9" containerName="mariadb-account-create" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.754737 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.758347 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.758875 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.763645 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.793258 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwwjh\" (UniqueName: \"kubernetes.io/projected/09df60bf-ab19-417c-8910-c666047d0ec9-kube-api-access-rwwjh\") pod \"09df60bf-ab19-417c-8910-c666047d0ec9\" (UID: \"09df60bf-ab19-417c-8910-c666047d0ec9\") " Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.793715 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6npkh\" (UniqueName: \"kubernetes.io/projected/aec6f146-7664-4f01-ab13-7f4d9bad57f2-kube-api-access-6npkh\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.811771 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09df60bf-ab19-417c-8910-c666047d0ec9-kube-api-access-rwwjh" (OuterVolumeSpecName: "kube-api-access-rwwjh") pod "09df60bf-ab19-417c-8910-c666047d0ec9" (UID: "09df60bf-ab19-417c-8910-c666047d0ec9"). InnerVolumeSpecName "kube-api-access-rwwjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.899484 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.899557 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f396e95c-bf51-4e4d-9dc7-76188423316b-scripts\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.899654 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f396e95c-bf51-4e4d-9dc7-76188423316b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.899711 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkfm4\" (UniqueName: \"kubernetes.io/projected/f396e95c-bf51-4e4d-9dc7-76188423316b-kube-api-access-lkfm4\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.899734 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/f396e95c-bf51-4e4d-9dc7-76188423316b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.899766 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f396e95c-bf51-4e4d-9dc7-76188423316b-config-data\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.899842 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f396e95c-bf51-4e4d-9dc7-76188423316b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.899898 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f396e95c-bf51-4e4d-9dc7-76188423316b-logs\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:19 crc kubenswrapper[4592]: I0929 17:11:19.900036 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwwjh\" (UniqueName: \"kubernetes.io/projected/09df60bf-ab19-417c-8910-c666047d0ec9-kube-api-access-rwwjh\") on node \"crc\" DevicePath \"\"" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.003166 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f396e95c-bf51-4e4d-9dc7-76188423316b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.003494 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkfm4\" (UniqueName: \"kubernetes.io/projected/f396e95c-bf51-4e4d-9dc7-76188423316b-kube-api-access-lkfm4\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.003524 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f396e95c-bf51-4e4d-9dc7-76188423316b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.003556 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f396e95c-bf51-4e4d-9dc7-76188423316b-config-data\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.003603 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f396e95c-bf51-4e4d-9dc7-76188423316b-combined-ca-bundle\") pod 
\"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.003646 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f396e95c-bf51-4e4d-9dc7-76188423316b-logs\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.003703 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.003747 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f396e95c-bf51-4e4d-9dc7-76188423316b-scripts\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.008235 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f396e95c-bf51-4e4d-9dc7-76188423316b-logs\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.009822 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.010127 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f396e95c-bf51-4e4d-9dc7-76188423316b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.014350 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f396e95c-bf51-4e4d-9dc7-76188423316b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.020266 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f396e95c-bf51-4e4d-9dc7-76188423316b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.028505 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f396e95c-bf51-4e4d-9dc7-76188423316b-config-data\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0" Sep 29 17:11:20 crc 
kubenswrapper[4592]: I0929 17:11:20.045219 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkfm4\" (UniqueName: \"kubernetes.io/projected/f396e95c-bf51-4e4d-9dc7-76188423316b-kube-api-access-lkfm4\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0"
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.063390 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f396e95c-bf51-4e4d-9dc7-76188423316b-scripts\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0"
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.107439 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-94eb-account-create-wfwp5" event={"ID":"09df60bf-ab19-417c-8910-c666047d0ec9","Type":"ContainerDied","Data":"dbdca50334ac9362d801390b4c758472d456e46f39437f29e4689b9e42447375"}
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.107501 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbdca50334ac9362d801390b4c758472d456e46f39437f29e4689b9e42447375"
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.107575 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-94eb-account-create-wfwp5"
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.111323 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-5abd-account-create-sqj9l" event={"ID":"aec6f146-7664-4f01-ab13-7f4d9bad57f2","Type":"ContainerDied","Data":"43041d031edacd246c3ca1089750806c072843d6efa78ed3fb2168ca66356909"}
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.111371 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43041d031edacd246c3ca1089750806c072843d6efa78ed3fb2168ca66356909"
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.111468 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-5abd-account-create-sqj9l"
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.132587 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-external-api-0\" (UID: \"f396e95c-bf51-4e4d-9dc7-76188423316b\") " pod="openstack/glance-default-external-api-0"
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.155362 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.379559 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.494826 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bee2-account-create-rrx9l"
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.624115 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wn4jj\" (UniqueName: \"kubernetes.io/projected/0167bdf3-7113-4993-b294-d33073462e4d-kube-api-access-wn4jj\") pod \"0167bdf3-7113-4993-b294-d33073462e4d\" (UID: \"0167bdf3-7113-4993-b294-d33073462e4d\") "
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.632397 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0167bdf3-7113-4993-b294-d33073462e4d-kube-api-access-wn4jj" (OuterVolumeSpecName: "kube-api-access-wn4jj") pod "0167bdf3-7113-4993-b294-d33073462e4d" (UID: "0167bdf3-7113-4993-b294-d33073462e4d"). InnerVolumeSpecName "kube-api-access-wn4jj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.726454 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wn4jj\" (UniqueName: \"kubernetes.io/projected/0167bdf3-7113-4993-b294-d33073462e4d-kube-api-access-wn4jj\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:20 crc kubenswrapper[4592]: I0929 17:11:20.838720 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.176441 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-bee2-account-create-rrx9l"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.176459 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-bee2-account-create-rrx9l" event={"ID":"0167bdf3-7113-4993-b294-d33073462e4d","Type":"ContainerDied","Data":"89c7169e8b7914ce46fc326a44ce5da5fb42cc201036a0aefa6d8b12040b0db6"}
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.176598 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="89c7169e8b7914ce46fc326a44ce5da5fb42cc201036a0aefa6d8b12040b0db6"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.209588 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.209864 4592 generic.go:334] "Generic (PLEG): container finished" podID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerID="f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f" exitCode=0
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.213926 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28597e2b-61b9-4213-9980-deb0f1041e27" path="/var/lib/kubelet/pods/28597e2b-61b9-4213-9980-deb0f1041e27/volumes"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.215740 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e42af79d-fc77-4451-8550-cbd866e1eabe","Type":"ContainerStarted","Data":"07d41a0c65e49d7110a476e9afcc4a685a0a4f764791c9c1e07717839545c9a3"}
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.215766 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d689bd9-9bed-4f8f-a232-f91845d55937","Type":"ContainerDied","Data":"f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f"}
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.215780 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8d689bd9-9bed-4f8f-a232-f91845d55937","Type":"ContainerDied","Data":"c1beab9ddb1332c3b525b80986967a249f5ce99dfe417130f7a54801509eabb9"}
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.215789 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f396e95c-bf51-4e4d-9dc7-76188423316b","Type":"ContainerStarted","Data":"5c2a9dc5abce838e4d96fbc1f23ccc825ecfe09de3d8580afe26da34ee0b61ba"}
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.215807 4592 scope.go:117] "RemoveContainer" containerID="4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.271548 4592 scope.go:117] "RemoveContainer" containerID="28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.312825 4592 scope.go:117] "RemoveContainer" containerID="f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.341000 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpr8p\" (UniqueName: \"kubernetes.io/projected/8d689bd9-9bed-4f8f-a232-f91845d55937-kube-api-access-rpr8p\") pod \"8d689bd9-9bed-4f8f-a232-f91845d55937\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") "
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.345307 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d689bd9-9bed-4f8f-a232-f91845d55937-run-httpd\") pod \"8d689bd9-9bed-4f8f-a232-f91845d55937\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") "
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.349901 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d689bd9-9bed-4f8f-a232-f91845d55937-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8d689bd9-9bed-4f8f-a232-f91845d55937" (UID: "8d689bd9-9bed-4f8f-a232-f91845d55937"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.350313 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d689bd9-9bed-4f8f-a232-f91845d55937-log-httpd\") pod \"8d689bd9-9bed-4f8f-a232-f91845d55937\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") "
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.350400 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-scripts\") pod \"8d689bd9-9bed-4f8f-a232-f91845d55937\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") "
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.350515 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-sg-core-conf-yaml\") pod \"8d689bd9-9bed-4f8f-a232-f91845d55937\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") "
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.350593 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-combined-ca-bundle\") pod \"8d689bd9-9bed-4f8f-a232-f91845d55937\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") "
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.350678 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-config-data\") pod \"8d689bd9-9bed-4f8f-a232-f91845d55937\" (UID: \"8d689bd9-9bed-4f8f-a232-f91845d55937\") "
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.351582 4592 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d689bd9-9bed-4f8f-a232-f91845d55937-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.355456 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d689bd9-9bed-4f8f-a232-f91845d55937-kube-api-access-rpr8p" (OuterVolumeSpecName: "kube-api-access-rpr8p") pod "8d689bd9-9bed-4f8f-a232-f91845d55937" (UID: "8d689bd9-9bed-4f8f-a232-f91845d55937"). InnerVolumeSpecName "kube-api-access-rpr8p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.357481 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d689bd9-9bed-4f8f-a232-f91845d55937-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8d689bd9-9bed-4f8f-a232-f91845d55937" (UID: "8d689bd9-9bed-4f8f-a232-f91845d55937"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.379410 4592 scope.go:117] "RemoveContainer" containerID="2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.386305 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-scripts" (OuterVolumeSpecName: "scripts") pod "8d689bd9-9bed-4f8f-a232-f91845d55937" (UID: "8d689bd9-9bed-4f8f-a232-f91845d55937"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.426467 4592 scope.go:117] "RemoveContainer" containerID="4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2"
Sep 29 17:11:21 crc kubenswrapper[4592]: E0929 17:11:21.427337 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2\": container with ID starting with 4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2 not found: ID does not exist" containerID="4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.427376 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2"} err="failed to get container status \"4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2\": rpc error: code = NotFound desc = could not find container \"4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2\": container with ID starting with 4fac39b33efc3d0321aa99709eee65da6a70be0bf6f4735c92adc243a44f69b2 not found: ID does not exist"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.427399 4592 scope.go:117] "RemoveContainer" containerID="28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0"
Sep 29 17:11:21 crc kubenswrapper[4592]: E0929 17:11:21.429966 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0\": container with ID starting with 28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0 not found: ID does not exist" containerID="28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.430021 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0"} err="failed to get container status \"28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0\": rpc error: code = NotFound desc = could not find container \"28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0\": container with ID starting with 28efa3d3269e828de21a41d5933a01f81b9f6a45c1ef3cdde579cf50bffd58f0 not found: ID does not exist"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.430040 4592 scope.go:117] "RemoveContainer" containerID="f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f"
Sep 29 17:11:21 crc kubenswrapper[4592]: E0929 17:11:21.445745 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f\": container with ID starting with f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f not found: ID does not exist" containerID="f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.445787 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f"} err="failed to get container status \"f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f\": rpc error: code = NotFound desc = could not find container \"f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f\": container with ID starting with f9848b06081a36380ca9cfe54fe1d438d52c360e50e18d06ce502646e0bc414f not found: ID does not exist"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.445822 4592 scope.go:117] "RemoveContainer" containerID="2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3"
Sep 29 17:11:21 crc kubenswrapper[4592]: E0929 17:11:21.451057 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3\": container with ID starting with 2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3 not found: ID does not exist" containerID="2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.451094 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3"} err="failed to get container status \"2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3\": rpc error: code = NotFound desc = could not find container \"2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3\": container with ID starting with 2725c68e75ab52000f75974f64a4bb107173ebefdabc56f6903fefd52f863ba3 not found: ID does not exist"
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.453322 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpr8p\" (UniqueName: \"kubernetes.io/projected/8d689bd9-9bed-4f8f-a232-f91845d55937-kube-api-access-rpr8p\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.453342 4592 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8d689bd9-9bed-4f8f-a232-f91845d55937-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.453353 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.478308 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8d689bd9-9bed-4f8f-a232-f91845d55937" (UID: "8d689bd9-9bed-4f8f-a232-f91845d55937"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.497283 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d689bd9-9bed-4f8f-a232-f91845d55937" (UID: "8d689bd9-9bed-4f8f-a232-f91845d55937"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.551383 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-config-data" (OuterVolumeSpecName: "config-data") pod "8d689bd9-9bed-4f8f-a232-f91845d55937" (UID: "8d689bd9-9bed-4f8f-a232-f91845d55937"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.555578 4592 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.555621 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:21 crc kubenswrapper[4592]: I0929 17:11:21.555639 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d689bd9-9bed-4f8f-a232-f91845d55937-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.220402 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e42af79d-fc77-4451-8550-cbd866e1eabe","Type":"ContainerStarted","Data":"1f5593c968598b64e681886c384dd5aba597ac300202ef03313f935a427a15cb"}
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.222022 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.225227 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f396e95c-bf51-4e4d-9dc7-76188423316b","Type":"ContainerStarted","Data":"bdb532f4b9b1c45abfe099cbcabdc3cf4ba63df8becbc0555ff81b2389644715"}
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.265180 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.274474 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.284795 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 17:11:22 crc kubenswrapper[4592]: E0929 17:11:22.285278 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="proxy-httpd"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.285300 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="proxy-httpd"
Sep 29 17:11:22 crc kubenswrapper[4592]: E0929 17:11:22.285368 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="ceilometer-central-agent"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.285376 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="ceilometer-central-agent"
Sep 29 17:11:22 crc kubenswrapper[4592]: E0929 17:11:22.285392 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="sg-core"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.285400 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="sg-core"
Sep 29 17:11:22 crc kubenswrapper[4592]: E0929 17:11:22.285413 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="ceilometer-notification-agent"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.285421 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="ceilometer-notification-agent"
Sep 29 17:11:22 crc kubenswrapper[4592]: E0929 17:11:22.285436 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0167bdf3-7113-4993-b294-d33073462e4d" containerName="mariadb-account-create"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.285443 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="0167bdf3-7113-4993-b294-d33073462e4d" containerName="mariadb-account-create"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.285662 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="proxy-httpd"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.285685 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="0167bdf3-7113-4993-b294-d33073462e4d" containerName="mariadb-account-create"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.285701 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="ceilometer-notification-agent"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.285713 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="ceilometer-central-agent"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.285725 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" containerName="sg-core"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.288187 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.295426 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.296031 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.301223 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.376183 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.376255 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-config-data\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.376285 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.376321 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqpzm\" (UniqueName: \"kubernetes.io/projected/4879ab43-62b0-46cf-a025-ffdacfa73641-kube-api-access-dqpzm\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.376365 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-scripts\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.376387 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4879ab43-62b0-46cf-a025-ffdacfa73641-log-httpd\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.376426 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4879ab43-62b0-46cf-a025-ffdacfa73641-run-httpd\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.477203 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-scripts\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.478165 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4879ab43-62b0-46cf-a025-ffdacfa73641-log-httpd\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.478277 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4879ab43-62b0-46cf-a025-ffdacfa73641-run-httpd\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.478359 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.478460 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-config-data\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.478537 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.478606 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4879ab43-62b0-46cf-a025-ffdacfa73641-log-httpd\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.479377 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4879ab43-62b0-46cf-a025-ffdacfa73641-run-httpd\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.481467 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-scripts\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.481625 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqpzm\" (UniqueName: \"kubernetes.io/projected/4879ab43-62b0-46cf-a025-ffdacfa73641-kube-api-access-dqpzm\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.496722 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.497920 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-config-data\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.515497 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqpzm\" (UniqueName: \"kubernetes.io/projected/4879ab43-62b0-46cf-a025-ffdacfa73641-kube-api-access-dqpzm\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.518273 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") " pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.642316 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hptl4"]
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.643575 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.650479 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.650716 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-75vj9"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.650963 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.659112 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hptl4"]
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.671838 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.684894 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-hptl4\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.685017 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-config-data\") pod \"nova-cell0-conductor-db-sync-hptl4\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.685058 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-scripts\") pod \"nova-cell0-conductor-db-sync-hptl4\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.685109 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9zhp\" (UniqueName: \"kubernetes.io/projected/617a27c8-02ce-43f0-a41d-230af300cafe-kube-api-access-k9zhp\") pod \"nova-cell0-conductor-db-sync-hptl4\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.786392 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9zhp\" (UniqueName: \"kubernetes.io/projected/617a27c8-02ce-43f0-a41d-230af300cafe-kube-api-access-k9zhp\") pod \"nova-cell0-conductor-db-sync-hptl4\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.786722 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-hptl4\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.786843 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-config-data\") pod \"nova-cell0-conductor-db-sync-hptl4\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.786899 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-scripts\") pod \"nova-cell0-conductor-db-sync-hptl4\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.790887 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-hptl4\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.794532 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-scripts\") pod \"nova-cell0-conductor-db-sync-hptl4\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.796250 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-config-data\") pod \"nova-cell0-conductor-db-sync-hptl4\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:22 crc kubenswrapper[4592]: I0929 17:11:22.808808 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9zhp\" (UniqueName: \"kubernetes.io/projected/617a27c8-02ce-43f0-a41d-230af300cafe-kube-api-access-k9zhp\") pod \"nova-cell0-conductor-db-sync-hptl4\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:23 crc kubenswrapper[4592]: I0929 17:11:23.024829 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hptl4"
Sep 29 17:11:23 crc kubenswrapper[4592]: I0929 17:11:23.204515 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d689bd9-9bed-4f8f-a232-f91845d55937" path="/var/lib/kubelet/pods/8d689bd9-9bed-4f8f-a232-f91845d55937/volumes"
Sep 29 17:11:23 crc kubenswrapper[4592]: I0929 17:11:23.257708 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e42af79d-fc77-4451-8550-cbd866e1eabe","Type":"ContainerStarted","Data":"0740a90d16b1d9846d0339cf52709178784cb6f0ef187b30c86015de4bf7f638"}
Sep 29 17:11:23 crc kubenswrapper[4592]: I0929 17:11:23.273447 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 17:11:23 crc kubenswrapper[4592]: I0929 17:11:23.282015 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.282000457 podStartE2EDuration="4.282000457s" podCreationTimestamp="2025-09-29 17:11:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:11:23.281647158 +0000 UTC m=+1213.429424839" watchObservedRunningTime="2025-09-29 17:11:23.282000457 +0000 UTC m=+1213.429778138"
Sep 29 17:11:23 crc kubenswrapper[4592]: I0929 17:11:23.289667 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"f396e95c-bf51-4e4d-9dc7-76188423316b","Type":"ContainerStarted","Data":"a97dd623bc01c7f520f0d2980b15dc4ed3a6a66a2ceb532509817f309aaaec13"}
Sep 29 17:11:23 crc kubenswrapper[4592]: I0929 17:11:23.367086 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.367058903 podStartE2EDuration="4.367058903s" podCreationTimestamp="2025-09-29 17:11:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:11:23.323523827 +0000 UTC m=+1213.471301508" watchObservedRunningTime="2025-09-29 17:11:23.367058903 +0000 UTC m=+1213.514836594"
Sep 29 17:11:23 crc kubenswrapper[4592]: W0929 17:11:23.529681 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod617a27c8_02ce_43f0_a41d_230af300cafe.slice/crio-0c0801d7cb72d7afe824d6fa9a91ff0f58ce2c455a0e7f614ce8c5069d8cf6aa WatchSource:0}: Error finding container 0c0801d7cb72d7afe824d6fa9a91ff0f58ce2c455a0e7f614ce8c5069d8cf6aa: Status 404 returned error can't find the container with id 0c0801d7cb72d7afe824d6fa9a91ff0f58ce2c455a0e7f614ce8c5069d8cf6aa
Sep 29 17:11:23 crc kubenswrapper[4592]: I0929 17:11:23.531337 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hptl4"]
Sep 29 17:11:24 crc kubenswrapper[4592]: I0929 17:11:24.299753 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4879ab43-62b0-46cf-a025-ffdacfa73641","Type":"ContainerStarted","Data":"5f2ac64843246d1ba7fea87779005e717e6110b31a74a4fb152ac77d3f5a468d"}
Sep 29 17:11:24 crc kubenswrapper[4592]: I0929 17:11:24.301071 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hptl4" event={"ID":"617a27c8-02ce-43f0-a41d-230af300cafe","Type":"ContainerStarted","Data":"0c0801d7cb72d7afe824d6fa9a91ff0f58ce2c455a0e7f614ce8c5069d8cf6aa"}
Sep 29 17:11:26 crc kubenswrapper[4592]: I0929 17:11:26.103554 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 17:11:26 crc kubenswrapper[4592]: I0929 17:11:26.313027 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused"
Sep 29 17:11:26 crc kubenswrapper[4592]: I0929 17:11:26.482394 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-749bb4c784-lnncs" podUID="2d536771-b1ae-4daf-a9f1-1a86e2af88e8" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused"
Sep 29 17:11:27 crc kubenswrapper[4592]: I0929 17:11:27.336173 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4879ab43-62b0-46cf-a025-ffdacfa73641","Type":"ContainerStarted","Data":"3fe2534bb10ffc71051e4bd2ddd215bd2670891789ab75c8a9d0c79170c41aa6"}
Sep 29 17:11:29 crc kubenswrapper[4592]: I0929 17:11:29.612689 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Sep 29 17:11:29 crc kubenswrapper[4592]: I0929 17:11:29.613181 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Sep 29 17:11:29 crc kubenswrapper[4592]: I0929 17:11:29.661710 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Sep 29 17:11:29 crc kubenswrapper[4592]: I0929 17:11:29.706793 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Sep 29 17:11:30 crc kubenswrapper[4592]: I0929 17:11:30.155868 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Sep 29 17:11:30 crc kubenswrapper[4592]: I0929 17:11:30.155939 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Sep 29 17:11:30 crc kubenswrapper[4592]: I0929 17:11:30.207374 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Sep 29 17:11:30 crc kubenswrapper[4592]: I0929 17:11:30.249950 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Sep 29 17:11:30 crc kubenswrapper[4592]: I0929 17:11:30.370537 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Sep 29 17:11:30 crc kubenswrapper[4592]: I0929 17:11:30.370935 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Sep 29 17:11:30 crc kubenswrapper[4592]: I0929 17:11:30.371131 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Sep 29 17:11:30 crc kubenswrapper[4592]: I0929 17:11:30.371319 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Sep 29 17:11:30 crc kubenswrapper[4592]: I0929 17:11:30.883271 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 17:11:30 crc kubenswrapper[4592]: I0929 17:11:30.883620 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 17:11:32 crc kubenswrapper[4592]: I0929 17:11:32.394721 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 17:11:32 crc kubenswrapper[4592]: I0929 17:11:32.396450 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 17:11:33 crc kubenswrapper[4592]: I0929 17:11:33.404988 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4879ab43-62b0-46cf-a025-ffdacfa73641","Type":"ContainerStarted","Data":"7ff6084adf10ea6848867ca76c3b955d6e0292ab3f3e4c6670fea29a7b36b699"}
Sep 29 17:11:36 crc kubenswrapper[4592]: I0929 17:11:36.312382 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused"
Sep 29 17:11:36 crc kubenswrapper[4592]: I0929 17:11:36.313316 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-c9567f99b-8nh47"
Sep 29 17:11:36 crc kubenswrapper[4592]: I0929 17:11:36.314460 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"cda228aa120a90f351fae823ce96a1ef649a815a578fb0163561c02a9e5cf929"} pod="openstack/horizon-c9567f99b-8nh47" containerMessage="Container horizon failed startup probe, will be restarted"
Sep 29 17:11:36 crc kubenswrapper[4592]: I0929 17:11:36.314528 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" containerID="cri-o://cda228aa120a90f351fae823ce96a1ef649a815a578fb0163561c02a9e5cf929" gracePeriod=30
Sep 29 17:11:36 crc kubenswrapper[4592]: I0929 17:11:36.481083 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-749bb4c784-lnncs" podUID="2d536771-b1ae-4daf-a9f1-1a86e2af88e8" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused"
Sep 29 17:11:36 crc kubenswrapper[4592]: I0929 17:11:36.481230 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-749bb4c784-lnncs"
Sep 29 17:11:36 crc kubenswrapper[4592]: I0929 17:11:36.482242 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"a5bd2ab6ff6d21a98c48e8f0a10906fee7808ff984fbf29492d97a299aa60c56"} pod="openstack/horizon-749bb4c784-lnncs" containerMessage="Container horizon failed startup probe, will be restarted"
Sep 29 17:11:36 crc kubenswrapper[4592]: I0929 17:11:36.482305 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-749bb4c784-lnncs" podUID="2d536771-b1ae-4daf-a9f1-1a86e2af88e8" containerName="horizon" containerID="cri-o://a5bd2ab6ff6d21a98c48e8f0a10906fee7808ff984fbf29492d97a299aa60c56" gracePeriod=30
Sep 29 17:11:44 crc kubenswrapper[4592]: E0929 17:11:44.214488 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified"
Sep 29 17:11:44 crc kubenswrapper[4592]: E0929 17:11:44.215976 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:nova-cell0-conductor-db-sync,Image:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CELL_NAME,Value:cell0,ValueFrom:nil,},EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:false,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/kolla/config_files/config.json,SubPath:nova-conductor-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k9zhp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42436,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-cell0-conductor-db-sync-hptl4_openstack(617a27c8-02ce-43f0-a41d-230af300cafe): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Sep 29 17:11:44 crc kubenswrapper[4592]: E0929 17:11:44.217605 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/nova-cell0-conductor-db-sync-hptl4" podUID="617a27c8-02ce-43f0-a41d-230af300cafe"
Sep 29 17:11:44 crc kubenswrapper[4592]: E0929 17:11:44.513645 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified\\\"\"" pod="openstack/nova-cell0-conductor-db-sync-hptl4" podUID="617a27c8-02ce-43f0-a41d-230af300cafe"
Sep 29 17:11:48 crc kubenswrapper[4592]: I0929 17:11:48.544213 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4879ab43-62b0-46cf-a025-ffdacfa73641","Type":"ContainerStarted","Data":"bddb2eb0a54b382350aa3238aa179a1494591ee97939660dfd478eb679c19932"}
Sep 29 17:11:48 crc kubenswrapper[4592]: I0929 17:11:48.546986 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"d6e91b2c-f8ba-4654-8431-a50545a2c37b","Type":"ContainerStarted","Data":"88f05580a7557861c8fa679e5df48cb2006ad4f7603979c8581f157a9709d98e"}
Sep 29 17:11:48 crc kubenswrapper[4592]: I0929 17:11:48.572347 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.674446299 podStartE2EDuration="55.572329869s" podCreationTimestamp="2025-09-29 17:10:53 +0000 UTC" firstStartedPulling="2025-09-29 17:10:54.410113944 +0000 UTC m=+1184.557891625" lastFinishedPulling="2025-09-29 17:11:47.307997514 +0000 UTC m=+1237.455775195" observedRunningTime="2025-09-29 17:11:48.566658492 +0000 UTC m=+1238.714436183" watchObservedRunningTime="2025-09-29 17:11:48.572329869 +0000 UTC m=+1238.720107550"
Sep 29 17:11:48 crc kubenswrapper[4592]: I0929 17:11:48.691594 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Sep 29 17:11:48 crc kubenswrapper[4592]: I0929 17:11:48.691719 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 17:11:48 crc kubenswrapper[4592]: I0929 17:11:48.707857 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Sep 29 17:11:48 crc kubenswrapper[4592]: I0929 17:11:48.707956 4592 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Sep 29 17:11:48 crc kubenswrapper[4592]: I0929 17:11:48.745999 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Sep 29 17:11:48 crc kubenswrapper[4592]: I0929 17:11:48.815560 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Sep 29 17:11:49 crc kubenswrapper[4592]: I0929 17:11:49.562950 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="ceilometer-central-agent" containerID="cri-o://3fe2534bb10ffc71051e4bd2ddd215bd2670891789ab75c8a9d0c79170c41aa6" gracePeriod=30
Sep 29 17:11:49 crc kubenswrapper[4592]: I0929 17:11:49.563435 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4879ab43-62b0-46cf-a025-ffdacfa73641","Type":"ContainerStarted","Data":"62afb41f86a1b570254afeb6cf7e05472d337a724a154003ca77a3e570c0eb0b"}
Sep 29 17:11:49 crc kubenswrapper[4592]: I0929 17:11:49.563473 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Sep 29 17:11:49 crc kubenswrapper[4592]: I0929 17:11:49.563550 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="proxy-httpd" containerID="cri-o://62afb41f86a1b570254afeb6cf7e05472d337a724a154003ca77a3e570c0eb0b" gracePeriod=30
Sep 29 17:11:49 crc kubenswrapper[4592]: I0929 17:11:49.563625 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="sg-core" containerID="cri-o://bddb2eb0a54b382350aa3238aa179a1494591ee97939660dfd478eb679c19932" gracePeriod=30
Sep 29 17:11:49 crc kubenswrapper[4592]: I0929 17:11:49.563674 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="ceilometer-notification-agent" containerID="cri-o://7ff6084adf10ea6848867ca76c3b955d6e0292ab3f3e4c6670fea29a7b36b699" gracePeriod=30
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.618358 4592 generic.go:334] "Generic (PLEG): container finished" podID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerID="62afb41f86a1b570254afeb6cf7e05472d337a724a154003ca77a3e570c0eb0b" exitCode=0
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.618948 4592 generic.go:334] "Generic (PLEG): container finished" podID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerID="bddb2eb0a54b382350aa3238aa179a1494591ee97939660dfd478eb679c19932" exitCode=2
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.618961 4592 generic.go:334] "Generic (PLEG): container finished" podID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerID="7ff6084adf10ea6848867ca76c3b955d6e0292ab3f3e4c6670fea29a7b36b699" exitCode=0
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.618970 4592 generic.go:334] "Generic (PLEG): container finished" podID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerID="3fe2534bb10ffc71051e4bd2ddd215bd2670891789ab75c8a9d0c79170c41aa6" exitCode=0
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.618950 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4879ab43-62b0-46cf-a025-ffdacfa73641","Type":"ContainerDied","Data":"62afb41f86a1b570254afeb6cf7e05472d337a724a154003ca77a3e570c0eb0b"}
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.619038 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4879ab43-62b0-46cf-a025-ffdacfa73641","Type":"ContainerDied","Data":"bddb2eb0a54b382350aa3238aa179a1494591ee97939660dfd478eb679c19932"}
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.619056 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4879ab43-62b0-46cf-a025-ffdacfa73641","Type":"ContainerDied","Data":"7ff6084adf10ea6848867ca76c3b955d6e0292ab3f3e4c6670fea29a7b36b699"}
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.619085 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4879ab43-62b0-46cf-a025-ffdacfa73641","Type":"ContainerDied","Data":"3fe2534bb10ffc71051e4bd2ddd215bd2670891789ab75c8a9d0c79170c41aa6"}
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.746748 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.853755 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4879ab43-62b0-46cf-a025-ffdacfa73641-run-httpd\") pod \"4879ab43-62b0-46cf-a025-ffdacfa73641\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") "
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.853820 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-sg-core-conf-yaml\") pod \"4879ab43-62b0-46cf-a025-ffdacfa73641\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") "
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.853871 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqpzm\" (UniqueName: \"kubernetes.io/projected/4879ab43-62b0-46cf-a025-ffdacfa73641-kube-api-access-dqpzm\") pod \"4879ab43-62b0-46cf-a025-ffdacfa73641\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") "
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.853949 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-config-data\") pod \"4879ab43-62b0-46cf-a025-ffdacfa73641\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") "
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.853990 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-combined-ca-bundle\") pod \"4879ab43-62b0-46cf-a025-ffdacfa73641\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") "
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.854065 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4879ab43-62b0-46cf-a025-ffdacfa73641-log-httpd\") pod \"4879ab43-62b0-46cf-a025-ffdacfa73641\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") "
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.854108 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-scripts\") pod \"4879ab43-62b0-46cf-a025-ffdacfa73641\" (UID: \"4879ab43-62b0-46cf-a025-ffdacfa73641\") "
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.863422 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4879ab43-62b0-46cf-a025-ffdacfa73641-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4879ab43-62b0-46cf-a025-ffdacfa73641" (UID: "4879ab43-62b0-46cf-a025-ffdacfa73641"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.863935 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4879ab43-62b0-46cf-a025-ffdacfa73641-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4879ab43-62b0-46cf-a025-ffdacfa73641" (UID: "4879ab43-62b0-46cf-a025-ffdacfa73641"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.873371 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-scripts" (OuterVolumeSpecName: "scripts") pod "4879ab43-62b0-46cf-a025-ffdacfa73641" (UID: "4879ab43-62b0-46cf-a025-ffdacfa73641"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.899307 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4879ab43-62b0-46cf-a025-ffdacfa73641-kube-api-access-dqpzm" (OuterVolumeSpecName: "kube-api-access-dqpzm") pod "4879ab43-62b0-46cf-a025-ffdacfa73641" (UID: "4879ab43-62b0-46cf-a025-ffdacfa73641"). InnerVolumeSpecName "kube-api-access-dqpzm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.956550 4592 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4879ab43-62b0-46cf-a025-ffdacfa73641-run-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.956590 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqpzm\" (UniqueName: \"kubernetes.io/projected/4879ab43-62b0-46cf-a025-ffdacfa73641-kube-api-access-dqpzm\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.956605 4592 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4879ab43-62b0-46cf-a025-ffdacfa73641-log-httpd\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.956616 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-scripts\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:50 crc kubenswrapper[4592]: I0929 17:11:50.956911 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4879ab43-62b0-46cf-a025-ffdacfa73641" (UID: "4879ab43-62b0-46cf-a025-ffdacfa73641"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.058537 4592 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.097514 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4879ab43-62b0-46cf-a025-ffdacfa73641" (UID: "4879ab43-62b0-46cf-a025-ffdacfa73641"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.127234 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-config-data" (OuterVolumeSpecName: "config-data") pod "4879ab43-62b0-46cf-a025-ffdacfa73641" (UID: "4879ab43-62b0-46cf-a025-ffdacfa73641"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.160523 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.160553 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4879ab43-62b0-46cf-a025-ffdacfa73641-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.630729 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4879ab43-62b0-46cf-a025-ffdacfa73641","Type":"ContainerDied","Data":"5f2ac64843246d1ba7fea87779005e717e6110b31a74a4fb152ac77d3f5a468d"}
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.630790 4592 scope.go:117] "RemoveContainer" containerID="62afb41f86a1b570254afeb6cf7e05472d337a724a154003ca77a3e570c0eb0b"
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.630966 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.656272 4592 scope.go:117] "RemoveContainer" containerID="bddb2eb0a54b382350aa3238aa179a1494591ee97939660dfd478eb679c19932"
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.672061 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.681344 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.682239 4592 scope.go:117] "RemoveContainer" containerID="7ff6084adf10ea6848867ca76c3b955d6e0292ab3f3e4c6670fea29a7b36b699"
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.705762 4592 scope.go:117] "RemoveContainer" containerID="3fe2534bb10ffc71051e4bd2ddd215bd2670891789ab75c8a9d0c79170c41aa6"
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.717506 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Sep 29 17:11:51 crc kubenswrapper[4592]: E0929 17:11:51.717874 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="ceilometer-notification-agent"
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.717892 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="ceilometer-notification-agent"
Sep 29 17:11:51 crc kubenswrapper[4592]: E0929 17:11:51.717901 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="ceilometer-central-agent"
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.717907 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="ceilometer-central-agent"
Sep 29 17:11:51 crc kubenswrapper[4592]: E0929 17:11:51.717923 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="proxy-httpd"
Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.717928 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="proxy-httpd"
Sep 29 17:11:51 crc kubenswrapper[4592]: E0929 17:11:51.717941 4592 cpu_manager.go:410] "RemoveStaleState:
removing container" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="sg-core" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.717946 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="sg-core" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.718120 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="proxy-httpd" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.718133 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="ceilometer-notification-agent" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.718184 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="sg-core" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.718203 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" containerName="ceilometer-central-agent" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.719783 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.722392 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.722694 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.748069 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.787408 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.787516 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e069dfe-7a70-493b-b753-fc3855cf34cf-run-httpd\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.787556 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x2gz\" (UniqueName: \"kubernetes.io/projected/8e069dfe-7a70-493b-b753-fc3855cf34cf-kube-api-access-4x2gz\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.787584 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.787628 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e069dfe-7a70-493b-b753-fc3855cf34cf-log-httpd\") pod \"ceilometer-0\" (UID: 
\"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.787651 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-config-data\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.787681 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-scripts\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.888745 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.889097 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e069dfe-7a70-493b-b753-fc3855cf34cf-log-httpd\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.889225 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-config-data\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.889322 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-scripts\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.889452 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.890066 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e069dfe-7a70-493b-b753-fc3855cf34cf-log-httpd\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.890293 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e069dfe-7a70-493b-b753-fc3855cf34cf-run-httpd\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.890425 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x2gz\" (UniqueName: \"kubernetes.io/projected/8e069dfe-7a70-493b-b753-fc3855cf34cf-kube-api-access-4x2gz\") pod \"ceilometer-0\" (UID: 
\"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.890977 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e069dfe-7a70-493b-b753-fc3855cf34cf-run-httpd\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.893731 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.893829 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-config-data\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.894096 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.910521 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-scripts\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:51 crc kubenswrapper[4592]: I0929 17:11:51.916192 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x2gz\" (UniqueName: \"kubernetes.io/projected/8e069dfe-7a70-493b-b753-fc3855cf34cf-kube-api-access-4x2gz\") pod \"ceilometer-0\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " pod="openstack/ceilometer-0" Sep 29 17:11:52 crc kubenswrapper[4592]: I0929 17:11:52.039744 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:11:52 crc kubenswrapper[4592]: I0929 17:11:52.615855 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:11:52 crc kubenswrapper[4592]: I0929 17:11:52.653911 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e069dfe-7a70-493b-b753-fc3855cf34cf","Type":"ContainerStarted","Data":"8963eb04b5bfcb123c5893de62e316fb70526b93c3c146b4781c560133c639ea"} Sep 29 17:11:52 crc kubenswrapper[4592]: I0929 17:11:52.768327 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:11:53 crc kubenswrapper[4592]: I0929 17:11:53.196084 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4879ab43-62b0-46cf-a025-ffdacfa73641" path="/var/lib/kubelet/pods/4879ab43-62b0-46cf-a025-ffdacfa73641/volumes" Sep 29 17:11:53 crc kubenswrapper[4592]: I0929 17:11:53.680358 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e069dfe-7a70-493b-b753-fc3855cf34cf","Type":"ContainerStarted","Data":"2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582"} Sep 29 17:11:54 crc kubenswrapper[4592]: I0929 17:11:54.695111 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e069dfe-7a70-493b-b753-fc3855cf34cf","Type":"ContainerStarted","Data":"7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5"} Sep 29 17:11:55 crc kubenswrapper[4592]: I0929 17:11:55.705396 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e069dfe-7a70-493b-b753-fc3855cf34cf","Type":"ContainerStarted","Data":"f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c"} Sep 29 17:11:56 crc kubenswrapper[4592]: I0929 17:11:56.715376 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e069dfe-7a70-493b-b753-fc3855cf34cf","Type":"ContainerStarted","Data":"687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb"} Sep 29 17:11:56 crc kubenswrapper[4592]: I0929 17:11:56.717076 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 17:11:56 crc kubenswrapper[4592]: I0929 17:11:56.715539 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="sg-core" containerID="cri-o://f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c" gracePeriod=30 Sep 29 17:11:56 crc kubenswrapper[4592]: I0929 17:11:56.715539 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="ceilometer-notification-agent" containerID="cri-o://7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5" gracePeriod=30 Sep 29 17:11:56 crc kubenswrapper[4592]: I0929 17:11:56.715570 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="proxy-httpd" containerID="cri-o://687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb" gracePeriod=30 Sep 29 17:11:56 crc kubenswrapper[4592]: I0929 17:11:56.715478 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="ceilometer-central-agent" 
containerID="cri-o://2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582" gracePeriod=30 Sep 29 17:11:56 crc kubenswrapper[4592]: I0929 17:11:56.743398 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.18785013 podStartE2EDuration="5.743372214s" podCreationTimestamp="2025-09-29 17:11:51 +0000 UTC" firstStartedPulling="2025-09-29 17:11:52.608467349 +0000 UTC m=+1242.756245030" lastFinishedPulling="2025-09-29 17:11:56.163989433 +0000 UTC m=+1246.311767114" observedRunningTime="2025-09-29 17:11:56.740607528 +0000 UTC m=+1246.888385209" watchObservedRunningTime="2025-09-29 17:11:56.743372214 +0000 UTC m=+1246.891149885" Sep 29 17:11:57 crc kubenswrapper[4592]: I0929 17:11:57.728247 4592 generic.go:334] "Generic (PLEG): container finished" podID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerID="687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb" exitCode=0 Sep 29 17:11:57 crc kubenswrapper[4592]: I0929 17:11:57.728288 4592 generic.go:334] "Generic (PLEG): container finished" podID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerID="f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c" exitCode=2 Sep 29 17:11:57 crc kubenswrapper[4592]: I0929 17:11:57.728297 4592 generic.go:334] "Generic (PLEG): container finished" podID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerID="7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5" exitCode=0 Sep 29 17:11:57 crc kubenswrapper[4592]: I0929 17:11:57.728319 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e069dfe-7a70-493b-b753-fc3855cf34cf","Type":"ContainerDied","Data":"687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb"} Sep 29 17:11:57 crc kubenswrapper[4592]: I0929 17:11:57.728349 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e069dfe-7a70-493b-b753-fc3855cf34cf","Type":"ContainerDied","Data":"f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c"} Sep 29 17:11:57 crc kubenswrapper[4592]: I0929 17:11:57.728361 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e069dfe-7a70-493b-b753-fc3855cf34cf","Type":"ContainerDied","Data":"7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5"} Sep 29 17:11:59 crc kubenswrapper[4592]: I0929 17:11:59.748278 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hptl4" event={"ID":"617a27c8-02ce-43f0-a41d-230af300cafe","Type":"ContainerStarted","Data":"9f90897c66c8e5737be4f073a265520a2a9ccb80861b5d2042a54f8955e1be71"} Sep 29 17:11:59 crc kubenswrapper[4592]: I0929 17:11:59.767316 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-hptl4" podStartSLOduration=2.620994916 podStartE2EDuration="37.767297814s" podCreationTimestamp="2025-09-29 17:11:22 +0000 UTC" firstStartedPulling="2025-09-29 17:11:23.531045711 +0000 UTC m=+1213.678823392" lastFinishedPulling="2025-09-29 17:11:58.677348609 +0000 UTC m=+1248.825126290" observedRunningTime="2025-09-29 17:11:59.761862794 +0000 UTC m=+1249.909640475" watchObservedRunningTime="2025-09-29 17:11:59.767297814 +0000 UTC m=+1249.915075495" Sep 29 17:12:00 crc kubenswrapper[4592]: I0929 17:12:00.883621 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:12:00 crc kubenswrapper[4592]: I0929 17:12:00.884709 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.310113 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.463401 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-combined-ca-bundle\") pod \"8e069dfe-7a70-493b-b753-fc3855cf34cf\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.463477 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e069dfe-7a70-493b-b753-fc3855cf34cf-run-httpd\") pod \"8e069dfe-7a70-493b-b753-fc3855cf34cf\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.463502 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-config-data\") pod \"8e069dfe-7a70-493b-b753-fc3855cf34cf\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.463557 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-sg-core-conf-yaml\") pod \"8e069dfe-7a70-493b-b753-fc3855cf34cf\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.463605 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-scripts\") pod \"8e069dfe-7a70-493b-b753-fc3855cf34cf\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.463665 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e069dfe-7a70-493b-b753-fc3855cf34cf-log-httpd\") pod \"8e069dfe-7a70-493b-b753-fc3855cf34cf\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.463749 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4x2gz\" (UniqueName: \"kubernetes.io/projected/8e069dfe-7a70-493b-b753-fc3855cf34cf-kube-api-access-4x2gz\") pod \"8e069dfe-7a70-493b-b753-fc3855cf34cf\" (UID: \"8e069dfe-7a70-493b-b753-fc3855cf34cf\") " Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.464193 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e069dfe-7a70-493b-b753-fc3855cf34cf-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8e069dfe-7a70-493b-b753-fc3855cf34cf" (UID: "8e069dfe-7a70-493b-b753-fc3855cf34cf"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.464453 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e069dfe-7a70-493b-b753-fc3855cf34cf-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8e069dfe-7a70-493b-b753-fc3855cf34cf" (UID: "8e069dfe-7a70-493b-b753-fc3855cf34cf"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.464753 4592 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e069dfe-7a70-493b-b753-fc3855cf34cf-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.464776 4592 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8e069dfe-7a70-493b-b753-fc3855cf34cf-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.470354 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-scripts" (OuterVolumeSpecName: "scripts") pod "8e069dfe-7a70-493b-b753-fc3855cf34cf" (UID: "8e069dfe-7a70-493b-b753-fc3855cf34cf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.470381 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e069dfe-7a70-493b-b753-fc3855cf34cf-kube-api-access-4x2gz" (OuterVolumeSpecName: "kube-api-access-4x2gz") pod "8e069dfe-7a70-493b-b753-fc3855cf34cf" (UID: "8e069dfe-7a70-493b-b753-fc3855cf34cf"). InnerVolumeSpecName "kube-api-access-4x2gz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.495983 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8e069dfe-7a70-493b-b753-fc3855cf34cf" (UID: "8e069dfe-7a70-493b-b753-fc3855cf34cf"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.566447 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4x2gz\" (UniqueName: \"kubernetes.io/projected/8e069dfe-7a70-493b-b753-fc3855cf34cf-kube-api-access-4x2gz\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.566858 4592 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.567006 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.570846 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8e069dfe-7a70-493b-b753-fc3855cf34cf" (UID: "8e069dfe-7a70-493b-b753-fc3855cf34cf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.581122 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-config-data" (OuterVolumeSpecName: "config-data") pod "8e069dfe-7a70-493b-b753-fc3855cf34cf" (UID: "8e069dfe-7a70-493b-b753-fc3855cf34cf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.668250 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.668289 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e069dfe-7a70-493b-b753-fc3855cf34cf-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.770915 4592 generic.go:334] "Generic (PLEG): container finished" podID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerID="2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582" exitCode=0 Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.770975 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e069dfe-7a70-493b-b753-fc3855cf34cf","Type":"ContainerDied","Data":"2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582"} Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.771032 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8e069dfe-7a70-493b-b753-fc3855cf34cf","Type":"ContainerDied","Data":"8963eb04b5bfcb123c5893de62e316fb70526b93c3c146b4781c560133c639ea"} Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.771029 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.771056 4592 scope.go:117] "RemoveContainer" containerID="687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.812688 4592 scope.go:117] "RemoveContainer" containerID="f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.817527 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.827800 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.836736 4592 scope.go:117] "RemoveContainer" containerID="7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.838127 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:12:01 crc kubenswrapper[4592]: E0929 17:12:01.838466 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="ceilometer-central-agent" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.838480 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="ceilometer-central-agent" Sep 29 17:12:01 crc kubenswrapper[4592]: E0929 17:12:01.838498 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="sg-core" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.838503 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="sg-core" Sep 29 17:12:01 crc kubenswrapper[4592]: E0929 17:12:01.838515 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="ceilometer-notification-agent" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.838521 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="ceilometer-notification-agent" Sep 29 17:12:01 crc kubenswrapper[4592]: E0929 17:12:01.838539 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="proxy-httpd" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.838544 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="proxy-httpd" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.838707 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="ceilometer-notification-agent" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.838722 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="ceilometer-central-agent" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.838737 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="sg-core" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.838751 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" containerName="proxy-httpd" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.840240 4592 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.842488 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.845439 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.864617 4592 scope.go:117] "RemoveContainer" containerID="2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.870992 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.904168 4592 scope.go:117] "RemoveContainer" containerID="687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb" Sep 29 17:12:01 crc kubenswrapper[4592]: E0929 17:12:01.905137 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb\": container with ID starting with 687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb not found: ID does not exist" containerID="687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.905288 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb"} err="failed to get container status \"687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb\": rpc error: code = NotFound desc = could not find container \"687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb\": container with ID starting with 687477a2289c099e0791a5024007d2909fd586cbb22455752319d488ebcc2dfb not found: ID does not exist" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.905371 4592 scope.go:117] "RemoveContainer" containerID="f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c" Sep 29 17:12:01 crc kubenswrapper[4592]: E0929 17:12:01.905807 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c\": container with ID starting with f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c not found: ID does not exist" containerID="f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.905914 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c"} err="failed to get container status \"f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c\": rpc error: code = NotFound desc = could not find container \"f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c\": container with ID starting with f23c2d302495bf0ede6147ac91f8bf0a1c81d13c1028a86afebd95689cffd14c not found: ID does not exist" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.905965 4592 scope.go:117] "RemoveContainer" containerID="7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5" Sep 29 17:12:01 crc kubenswrapper[4592]: E0929 17:12:01.906313 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound 
desc = could not find container \"7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5\": container with ID starting with 7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5 not found: ID does not exist" containerID="7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.906407 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5"} err="failed to get container status \"7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5\": rpc error: code = NotFound desc = could not find container \"7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5\": container with ID starting with 7cefed7e47b27c2fb684c90f8f13d9b8874c422a502b96e3c45f7e49841924f5 not found: ID does not exist" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.906481 4592 scope.go:117] "RemoveContainer" containerID="2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582" Sep 29 17:12:01 crc kubenswrapper[4592]: E0929 17:12:01.906720 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582\": container with ID starting with 2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582 not found: ID does not exist" containerID="2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.906795 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582"} err="failed to get container status \"2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582\": rpc error: code = NotFound desc = could not find container \"2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582\": container with ID starting with 2df8a583d724a4b01407e506fd5add8915e74d3e3c5fdd08c4cb92da867b4582 not found: ID does not exist" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.973938 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.973975 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-scripts\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.974040 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e1b0183-5638-42d4-85fc-498f16b43305-log-httpd\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.974120 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-config-data\") pod \"ceilometer-0\" (UID: 
\"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.974181 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e1b0183-5638-42d4-85fc-498f16b43305-run-httpd\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.974277 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:01 crc kubenswrapper[4592]: I0929 17:12:01.974365 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khj9m\" (UniqueName: \"kubernetes.io/projected/5e1b0183-5638-42d4-85fc-498f16b43305-kube-api-access-khj9m\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.075942 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.076030 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khj9m\" (UniqueName: \"kubernetes.io/projected/5e1b0183-5638-42d4-85fc-498f16b43305-kube-api-access-khj9m\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.076073 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.076089 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-scripts\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.076120 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e1b0183-5638-42d4-85fc-498f16b43305-log-httpd\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.076194 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-config-data\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.076218 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/5e1b0183-5638-42d4-85fc-498f16b43305-run-httpd\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.076598 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e1b0183-5638-42d4-85fc-498f16b43305-run-httpd\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.076821 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e1b0183-5638-42d4-85fc-498f16b43305-log-httpd\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.080840 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-config-data\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.088672 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-scripts\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.096898 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.097285 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.104754 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khj9m\" (UniqueName: \"kubernetes.io/projected/5e1b0183-5638-42d4-85fc-498f16b43305-kube-api-access-khj9m\") pod \"ceilometer-0\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.165124 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:12:02 crc kubenswrapper[4592]: W0929 17:12:02.639686 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e1b0183_5638_42d4_85fc_498f16b43305.slice/crio-dff91f3b5c370ff4ff696b39ccd2a30f4397320b7be25e39421bfc9e070604e0 WatchSource:0}: Error finding container dff91f3b5c370ff4ff696b39ccd2a30f4397320b7be25e39421bfc9e070604e0: Status 404 returned error can't find the container with id dff91f3b5c370ff4ff696b39ccd2a30f4397320b7be25e39421bfc9e070604e0 Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.643458 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:12:02 crc kubenswrapper[4592]: I0929 17:12:02.783023 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e1b0183-5638-42d4-85fc-498f16b43305","Type":"ContainerStarted","Data":"dff91f3b5c370ff4ff696b39ccd2a30f4397320b7be25e39421bfc9e070604e0"} Sep 29 17:12:03 crc kubenswrapper[4592]: I0929 17:12:03.214427 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e069dfe-7a70-493b-b753-fc3855cf34cf" path="/var/lib/kubelet/pods/8e069dfe-7a70-493b-b753-fc3855cf34cf/volumes" Sep 29 17:12:03 crc kubenswrapper[4592]: I0929 17:12:03.792061 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e1b0183-5638-42d4-85fc-498f16b43305","Type":"ContainerStarted","Data":"8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b"} Sep 29 17:12:04 crc kubenswrapper[4592]: I0929 17:12:04.353254 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:12:04 crc kubenswrapper[4592]: I0929 17:12:04.818549 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e1b0183-5638-42d4-85fc-498f16b43305","Type":"ContainerStarted","Data":"c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a"} Sep 29 17:12:05 crc kubenswrapper[4592]: I0929 17:12:05.827779 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e1b0183-5638-42d4-85fc-498f16b43305","Type":"ContainerStarted","Data":"bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748"} Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.837023 4592 generic.go:334] "Generic (PLEG): container finished" podID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerID="cda228aa120a90f351fae823ce96a1ef649a815a578fb0163561c02a9e5cf929" exitCode=137 Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.838592 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c9567f99b-8nh47" event={"ID":"de56880e-c3e2-46db-b63d-c46acd0f6e1f","Type":"ContainerDied","Data":"cda228aa120a90f351fae823ce96a1ef649a815a578fb0163561c02a9e5cf929"} Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.838727 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c9567f99b-8nh47" event={"ID":"de56880e-c3e2-46db-b63d-c46acd0f6e1f","Type":"ContainerStarted","Data":"8e464d652fc134a4e820c67f002eb02e3dca835f203b4f1858bc3d8b584b6796"} Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.838841 4592 scope.go:117] "RemoveContainer" containerID="5cfe4146468dedf86aeb20915c49e1cd273dffa7a3e1db160e17e0a28afe71ab" Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.853515 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"5e1b0183-5638-42d4-85fc-498f16b43305","Type":"ContainerStarted","Data":"8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf"} Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.854454 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.854031 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="sg-core" containerID="cri-o://bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748" gracePeriod=30 Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.854044 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="proxy-httpd" containerID="cri-o://8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf" gracePeriod=30 Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.854055 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="ceilometer-notification-agent" containerID="cri-o://c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a" gracePeriod=30 Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.853826 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="ceilometer-central-agent" containerID="cri-o://8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b" gracePeriod=30 Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.873413 4592 generic.go:334] "Generic (PLEG): container finished" podID="2d536771-b1ae-4daf-a9f1-1a86e2af88e8" containerID="a5bd2ab6ff6d21a98c48e8f0a10906fee7808ff984fbf29492d97a299aa60c56" exitCode=137 Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.873460 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749bb4c784-lnncs" event={"ID":"2d536771-b1ae-4daf-a9f1-1a86e2af88e8","Type":"ContainerDied","Data":"a5bd2ab6ff6d21a98c48e8f0a10906fee7808ff984fbf29492d97a299aa60c56"} Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.873492 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-749bb4c784-lnncs" event={"ID":"2d536771-b1ae-4daf-a9f1-1a86e2af88e8","Type":"ContainerStarted","Data":"43c2cc931034db8bc7b4eac8e52633e6699a5505ace48b6ddaa0f805bf553464"} Sep 29 17:12:06 crc kubenswrapper[4592]: I0929 17:12:06.893979 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.047161315 podStartE2EDuration="5.893963524s" podCreationTimestamp="2025-09-29 17:12:01 +0000 UTC" firstStartedPulling="2025-09-29 17:12:02.643417218 +0000 UTC m=+1252.791194909" lastFinishedPulling="2025-09-29 17:12:06.490219437 +0000 UTC m=+1256.637997118" observedRunningTime="2025-09-29 17:12:06.889585153 +0000 UTC m=+1257.037362834" watchObservedRunningTime="2025-09-29 17:12:06.893963524 +0000 UTC m=+1257.041741205" Sep 29 17:12:07 crc kubenswrapper[4592]: I0929 17:12:07.056655 4592 scope.go:117] "RemoveContainer" containerID="5866d8efe686127c7c7b10f8621f579880df1ada3ef7a4bb255d56617124a27d" Sep 29 17:12:07 crc kubenswrapper[4592]: I0929 17:12:07.893783 4592 generic.go:334] "Generic (PLEG): container finished" podID="5e1b0183-5638-42d4-85fc-498f16b43305" 
containerID="bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748" exitCode=2 Sep 29 17:12:07 crc kubenswrapper[4592]: I0929 17:12:07.894236 4592 generic.go:334] "Generic (PLEG): container finished" podID="5e1b0183-5638-42d4-85fc-498f16b43305" containerID="c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a" exitCode=0 Sep 29 17:12:07 crc kubenswrapper[4592]: I0929 17:12:07.894324 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e1b0183-5638-42d4-85fc-498f16b43305","Type":"ContainerDied","Data":"bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748"} Sep 29 17:12:07 crc kubenswrapper[4592]: I0929 17:12:07.894370 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e1b0183-5638-42d4-85fc-498f16b43305","Type":"ContainerDied","Data":"c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a"} Sep 29 17:12:08 crc kubenswrapper[4592]: I0929 17:12:08.912728 4592 generic.go:334] "Generic (PLEG): container finished" podID="5e1b0183-5638-42d4-85fc-498f16b43305" containerID="8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b" exitCode=0 Sep 29 17:12:08 crc kubenswrapper[4592]: I0929 17:12:08.912865 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e1b0183-5638-42d4-85fc-498f16b43305","Type":"ContainerDied","Data":"8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b"} Sep 29 17:12:11 crc kubenswrapper[4592]: I0929 17:12:11.945643 4592 generic.go:334] "Generic (PLEG): container finished" podID="617a27c8-02ce-43f0-a41d-230af300cafe" containerID="9f90897c66c8e5737be4f073a265520a2a9ccb80861b5d2042a54f8955e1be71" exitCode=0 Sep 29 17:12:11 crc kubenswrapper[4592]: I0929 17:12:11.945698 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hptl4" event={"ID":"617a27c8-02ce-43f0-a41d-230af300cafe","Type":"ContainerDied","Data":"9f90897c66c8e5737be4f073a265520a2a9ccb80861b5d2042a54f8955e1be71"} Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.340279 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hptl4" Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.412049 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-combined-ca-bundle\") pod \"617a27c8-02ce-43f0-a41d-230af300cafe\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.412108 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9zhp\" (UniqueName: \"kubernetes.io/projected/617a27c8-02ce-43f0-a41d-230af300cafe-kube-api-access-k9zhp\") pod \"617a27c8-02ce-43f0-a41d-230af300cafe\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.412172 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-scripts\") pod \"617a27c8-02ce-43f0-a41d-230af300cafe\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.412203 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-config-data\") pod \"617a27c8-02ce-43f0-a41d-230af300cafe\" (UID: \"617a27c8-02ce-43f0-a41d-230af300cafe\") " Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.418373 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/617a27c8-02ce-43f0-a41d-230af300cafe-kube-api-access-k9zhp" (OuterVolumeSpecName: "kube-api-access-k9zhp") pod "617a27c8-02ce-43f0-a41d-230af300cafe" (UID: "617a27c8-02ce-43f0-a41d-230af300cafe"). InnerVolumeSpecName "kube-api-access-k9zhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.421335 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-scripts" (OuterVolumeSpecName: "scripts") pod "617a27c8-02ce-43f0-a41d-230af300cafe" (UID: "617a27c8-02ce-43f0-a41d-230af300cafe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.447769 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-config-data" (OuterVolumeSpecName: "config-data") pod "617a27c8-02ce-43f0-a41d-230af300cafe" (UID: "617a27c8-02ce-43f0-a41d-230af300cafe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.451038 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "617a27c8-02ce-43f0-a41d-230af300cafe" (UID: "617a27c8-02ce-43f0-a41d-230af300cafe"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.513873 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.513912 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9zhp\" (UniqueName: \"kubernetes.io/projected/617a27c8-02ce-43f0-a41d-230af300cafe-kube-api-access-k9zhp\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.513926 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.513935 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/617a27c8-02ce-43f0-a41d-230af300cafe-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.968882 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-hptl4" event={"ID":"617a27c8-02ce-43f0-a41d-230af300cafe","Type":"ContainerDied","Data":"0c0801d7cb72d7afe824d6fa9a91ff0f58ce2c455a0e7f614ce8c5069d8cf6aa"} Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.968926 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c0801d7cb72d7afe824d6fa9a91ff0f58ce2c455a0e7f614ce8c5069d8cf6aa" Sep 29 17:12:13 crc kubenswrapper[4592]: I0929 17:12:13.969446 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-hptl4" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.095724 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 17:12:14 crc kubenswrapper[4592]: E0929 17:12:14.107245 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="617a27c8-02ce-43f0-a41d-230af300cafe" containerName="nova-cell0-conductor-db-sync" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.107263 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="617a27c8-02ce-43f0-a41d-230af300cafe" containerName="nova-cell0-conductor-db-sync" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.107452 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="617a27c8-02ce-43f0-a41d-230af300cafe" containerName="nova-cell0-conductor-db-sync" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.107934 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.108008 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.138548 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.138769 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-75vj9" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.226785 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvlpq\" (UniqueName: \"kubernetes.io/projected/ecda42b4-525c-464f-ab13-394434750d4a-kube-api-access-tvlpq\") pod \"nova-cell0-conductor-0\" (UID: \"ecda42b4-525c-464f-ab13-394434750d4a\") " pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.227167 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecda42b4-525c-464f-ab13-394434750d4a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ecda42b4-525c-464f-ab13-394434750d4a\") " pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.227230 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecda42b4-525c-464f-ab13-394434750d4a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ecda42b4-525c-464f-ab13-394434750d4a\") " pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.329038 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvlpq\" (UniqueName: \"kubernetes.io/projected/ecda42b4-525c-464f-ab13-394434750d4a-kube-api-access-tvlpq\") pod \"nova-cell0-conductor-0\" (UID: \"ecda42b4-525c-464f-ab13-394434750d4a\") " pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.329157 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecda42b4-525c-464f-ab13-394434750d4a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ecda42b4-525c-464f-ab13-394434750d4a\") " pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.329190 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecda42b4-525c-464f-ab13-394434750d4a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ecda42b4-525c-464f-ab13-394434750d4a\") " pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.334339 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecda42b4-525c-464f-ab13-394434750d4a-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"ecda42b4-525c-464f-ab13-394434750d4a\") " pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.340675 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecda42b4-525c-464f-ab13-394434750d4a-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"ecda42b4-525c-464f-ab13-394434750d4a\") " pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.351904 4592 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvlpq\" (UniqueName: \"kubernetes.io/projected/ecda42b4-525c-464f-ab13-394434750d4a-kube-api-access-tvlpq\") pod \"nova-cell0-conductor-0\" (UID: \"ecda42b4-525c-464f-ab13-394434750d4a\") " pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.461916 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.940625 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Sep 29 17:12:14 crc kubenswrapper[4592]: I0929 17:12:14.981961 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ecda42b4-525c-464f-ab13-394434750d4a","Type":"ContainerStarted","Data":"a8a4dc95740a1e68b5b16e5350252c5b6893c0122028490c6b88b11736332354"} Sep 29 17:12:15 crc kubenswrapper[4592]: I0929 17:12:15.996688 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"ecda42b4-525c-464f-ab13-394434750d4a","Type":"ContainerStarted","Data":"204ed4072079fcdc242b10b937d75dde8cf5fba029cdc2850c90da66acb1b020"} Sep 29 17:12:15 crc kubenswrapper[4592]: I0929 17:12:15.998631 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:16 crc kubenswrapper[4592]: I0929 17:12:16.024554 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.024533167 podStartE2EDuration="2.024533167s" podCreationTimestamp="2025-09-29 17:12:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:12:16.01993276 +0000 UTC m=+1266.167710441" watchObservedRunningTime="2025-09-29 17:12:16.024533167 +0000 UTC m=+1266.172310858" Sep 29 17:12:16 crc kubenswrapper[4592]: I0929 17:12:16.311752 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:12:16 crc kubenswrapper[4592]: I0929 17:12:16.311831 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:12:16 crc kubenswrapper[4592]: I0929 17:12:16.314119 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 17:12:16 crc kubenswrapper[4592]: I0929 17:12:16.480964 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:12:16 crc kubenswrapper[4592]: I0929 17:12:16.481035 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:12:16 crc kubenswrapper[4592]: I0929 17:12:16.481997 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-749bb4c784-lnncs" podUID="2d536771-b1ae-4daf-a9f1-1a86e2af88e8" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 29 17:12:24 crc kubenswrapper[4592]: I0929 17:12:24.494428 4592 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.108636 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-pcfkx"] Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.110098 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.124692 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.129480 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.135177 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-pcfkx"] Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.150890 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-config-data\") pod \"nova-cell0-cell-mapping-pcfkx\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.150967 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqhfv\" (UniqueName: \"kubernetes.io/projected/9b018763-574c-4186-9191-3342af9acbf3-kube-api-access-jqhfv\") pod \"nova-cell0-cell-mapping-pcfkx\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.151066 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-scripts\") pod \"nova-cell0-cell-mapping-pcfkx\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.151133 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-pcfkx\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.253216 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-pcfkx\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.253385 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-config-data\") pod \"nova-cell0-cell-mapping-pcfkx\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.253429 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqhfv\" 
(UniqueName: \"kubernetes.io/projected/9b018763-574c-4186-9191-3342af9acbf3-kube-api-access-jqhfv\") pod \"nova-cell0-cell-mapping-pcfkx\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.253500 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-scripts\") pod \"nova-cell0-cell-mapping-pcfkx\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.295163 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-pcfkx\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.295665 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-config-data\") pod \"nova-cell0-cell-mapping-pcfkx\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.299554 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-scripts\") pod \"nova-cell0-cell-mapping-pcfkx\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.301717 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqhfv\" (UniqueName: \"kubernetes.io/projected/9b018763-574c-4186-9191-3342af9acbf3-kube-api-access-jqhfv\") pod \"nova-cell0-cell-mapping-pcfkx\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.436405 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.455452 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.457470 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.466293 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.467921 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.479420 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.485760 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.559241 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bcf8349-e3ac-4b67-a167-fadd65b18307-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.559274 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.559291 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-config-data\") pod \"nova-metadata-0\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.559316 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp4dn\" (UniqueName: \"kubernetes.io/projected/3bcf8349-e3ac-4b67-a167-fadd65b18307-kube-api-access-sp4dn\") pod \"nova-api-0\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.559344 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ws5s2\" (UniqueName: \"kubernetes.io/projected/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-kube-api-access-ws5s2\") pod \"nova-metadata-0\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.559366 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bcf8349-e3ac-4b67-a167-fadd65b18307-config-data\") pod \"nova-api-0\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.559416 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-logs\") pod \"nova-metadata-0\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.559442 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3bcf8349-e3ac-4b67-a167-fadd65b18307-logs\") pod \"nova-api-0\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.606250 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:12:25 crc 
kubenswrapper[4592]: I0929 17:12:25.627769 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.662905 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-logs\") pod \"nova-metadata-0\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.662985 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3bcf8349-e3ac-4b67-a167-fadd65b18307-logs\") pod \"nova-api-0\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.663066 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bcf8349-e3ac-4b67-a167-fadd65b18307-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.663083 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.663110 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-config-data\") pod \"nova-metadata-0\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.663137 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp4dn\" (UniqueName: \"kubernetes.io/projected/3bcf8349-e3ac-4b67-a167-fadd65b18307-kube-api-access-sp4dn\") pod \"nova-api-0\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.663181 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ws5s2\" (UniqueName: \"kubernetes.io/projected/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-kube-api-access-ws5s2\") pod \"nova-metadata-0\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.663203 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bcf8349-e3ac-4b67-a167-fadd65b18307-config-data\") pod \"nova-api-0\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.667793 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3bcf8349-e3ac-4b67-a167-fadd65b18307-logs\") pod \"nova-api-0\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.668066 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-logs\") pod \"nova-metadata-0\" 
(UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.669323 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.679396 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.687342 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.687390 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bcf8349-e3ac-4b67-a167-fadd65b18307-config-data\") pod \"nova-api-0\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.697059 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.700727 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-config-data\") pod \"nova-metadata-0\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.705974 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bcf8349-e3ac-4b67-a167-fadd65b18307-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.716784 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.753779 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp4dn\" (UniqueName: \"kubernetes.io/projected/3bcf8349-e3ac-4b67-a167-fadd65b18307-kube-api-access-sp4dn\") pod \"nova-api-0\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.766308 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkrwc\" (UniqueName: \"kubernetes.io/projected/2eb51866-0f80-4027-a1bd-1519aee01031-kube-api-access-gkrwc\") pod \"nova-scheduler-0\" (UID: \"2eb51866-0f80-4027-a1bd-1519aee01031\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.766458 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2eb51866-0f80-4027-a1bd-1519aee01031-config-data\") pod \"nova-scheduler-0\" (UID: \"2eb51866-0f80-4027-a1bd-1519aee01031\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.766511 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eb51866-0f80-4027-a1bd-1519aee01031-combined-ca-bundle\") pod 
\"nova-scheduler-0\" (UID: \"2eb51866-0f80-4027-a1bd-1519aee01031\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.767280 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ws5s2\" (UniqueName: \"kubernetes.io/projected/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-kube-api-access-ws5s2\") pod \"nova-metadata-0\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.855060 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.856247 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.857008 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.863627 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.868985 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkrwc\" (UniqueName: \"kubernetes.io/projected/2eb51866-0f80-4027-a1bd-1519aee01031-kube-api-access-gkrwc\") pod \"nova-scheduler-0\" (UID: \"2eb51866-0f80-4027-a1bd-1519aee01031\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.869064 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2eb51866-0f80-4027-a1bd-1519aee01031-config-data\") pod \"nova-scheduler-0\" (UID: \"2eb51866-0f80-4027-a1bd-1519aee01031\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.869113 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eb51866-0f80-4027-a1bd-1519aee01031-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2eb51866-0f80-4027-a1bd-1519aee01031\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.873765 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.873842 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2eb51866-0f80-4027-a1bd-1519aee01031-config-data\") pod \"nova-scheduler-0\" (UID: \"2eb51866-0f80-4027-a1bd-1519aee01031\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.896021 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.896763 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eb51866-0f80-4027-a1bd-1519aee01031-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2eb51866-0f80-4027-a1bd-1519aee01031\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.915801 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkrwc\" (UniqueName: \"kubernetes.io/projected/2eb51866-0f80-4027-a1bd-1519aee01031-kube-api-access-gkrwc\") pod \"nova-scheduler-0\" (UID: \"2eb51866-0f80-4027-a1bd-1519aee01031\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.943328 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-kgpkc"] Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.944883 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.966614 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-kgpkc"] Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.971645 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-config\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.971755 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.971787 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-dns-svc\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.971833 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.971871 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.971913 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b49d2\" (UniqueName: \"kubernetes.io/projected/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-kube-api-access-b49d2\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.971955 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.972171 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndltn\" (UniqueName: \"kubernetes.io/projected/df77059a-3702-42ad-a217-1d527ae7c8af-kube-api-access-ndltn\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:25 crc kubenswrapper[4592]: I0929 17:12:25.972326 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.075631 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.075994 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.076040 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b49d2\" (UniqueName: \"kubernetes.io/projected/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-kube-api-access-b49d2\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.076067 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.076118 4592 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-ndltn\" (UniqueName: \"kubernetes.io/projected/df77059a-3702-42ad-a217-1d527ae7c8af-kube-api-access-ndltn\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.076210 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.076245 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-config\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.076296 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.076320 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-dns-svc\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.077433 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-dns-svc\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.080276 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.080952 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.081827 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-config\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.081887 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.089387 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.089532 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.094587 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.116762 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b49d2\" (UniqueName: \"kubernetes.io/projected/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-kube-api-access-b49d2\") pod \"nova-cell1-novncproxy-0\" (UID: \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.131522 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndltn\" (UniqueName: \"kubernetes.io/projected/df77059a-3702-42ad-a217-1d527ae7c8af-kube-api-access-ndltn\") pod \"dnsmasq-dns-865f5d856f-kgpkc\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.185751 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.277025 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.314675 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.481814 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-749bb4c784-lnncs" podUID="2d536771-b1ae-4daf-a9f1-1a86e2af88e8" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.510571 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-pcfkx"] Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.766215 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.915451 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-kgpkc"] Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.927822 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.976214 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 17:12:26 crc kubenswrapper[4592]: I0929 17:12:26.988362 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.176989 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" event={"ID":"df77059a-3702-42ad-a217-1d527ae7c8af","Type":"ContainerStarted","Data":"3bf57128fa0144e7f42d7c6b1c920941f8d8684c36edd4b89a12333a3137f9e1"} Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.178064 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1","Type":"ContainerStarted","Data":"134ed776d0bd5435ba539c5639fb2f3873d8527e0ab23cdbb8aa56c6ab1d3ad3"} Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.179804 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e","Type":"ContainerStarted","Data":"1adc9af4376b2e2dc18190abbd782ffe586aacfd3b9a4c39e46494bfe3ce5940"} Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.181205 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-pcfkx" event={"ID":"9b018763-574c-4186-9191-3342af9acbf3","Type":"ContainerStarted","Data":"96d505e5f0c53c8ca54d8a0fc72ef170f89636e7265d441f402abbfb1430dbd8"} Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.181346 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-pcfkx" event={"ID":"9b018763-574c-4186-9191-3342af9acbf3","Type":"ContainerStarted","Data":"0143fe9a7abb32f99f0a0235e584c6f3e0084d323a51da62cfaa1d09bc0f535d"} Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.202507 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-pcfkx" 
podStartSLOduration=2.202489953 podStartE2EDuration="2.202489953s" podCreationTimestamp="2025-09-29 17:12:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:12:27.201558587 +0000 UTC m=+1277.349336268" watchObservedRunningTime="2025-09-29 17:12:27.202489953 +0000 UTC m=+1277.350267644" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.204842 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2eb51866-0f80-4027-a1bd-1519aee01031","Type":"ContainerStarted","Data":"9e52ee17dfa8e4bc32e60d8770d51fdea37859f4ff02871230346cd208a7b984"} Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.204881 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3bcf8349-e3ac-4b67-a167-fadd65b18307","Type":"ContainerStarted","Data":"ae81174309766f61ba31d249ecbf9414a1344d46b6bf761b50b90c9a3343b7c3"} Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.776509 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-gn5bh"] Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.777914 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.784705 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.784946 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.800976 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-gn5bh"] Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.820087 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-config-data\") pod \"nova-cell1-conductor-db-sync-gn5bh\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.820201 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-gn5bh\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.820263 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvgg9\" (UniqueName: \"kubernetes.io/projected/feab94f4-b3ec-465b-8d59-22643f853dc8-kube-api-access-tvgg9\") pod \"nova-cell1-conductor-db-sync-gn5bh\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.820310 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-scripts\") pod \"nova-cell1-conductor-db-sync-gn5bh\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:27 
crc kubenswrapper[4592]: I0929 17:12:27.923406 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvgg9\" (UniqueName: \"kubernetes.io/projected/feab94f4-b3ec-465b-8d59-22643f853dc8-kube-api-access-tvgg9\") pod \"nova-cell1-conductor-db-sync-gn5bh\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.923481 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-scripts\") pod \"nova-cell1-conductor-db-sync-gn5bh\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.923594 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-config-data\") pod \"nova-cell1-conductor-db-sync-gn5bh\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.923634 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-gn5bh\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.931943 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-gn5bh\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.947738 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-scripts\") pod \"nova-cell1-conductor-db-sync-gn5bh\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.949321 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-config-data\") pod \"nova-cell1-conductor-db-sync-gn5bh\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:27 crc kubenswrapper[4592]: I0929 17:12:27.981458 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvgg9\" (UniqueName: \"kubernetes.io/projected/feab94f4-b3ec-465b-8d59-22643f853dc8-kube-api-access-tvgg9\") pod \"nova-cell1-conductor-db-sync-gn5bh\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:28 crc kubenswrapper[4592]: I0929 17:12:28.120015 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:28 crc kubenswrapper[4592]: I0929 17:12:28.291725 4592 generic.go:334] "Generic (PLEG): container finished" podID="df77059a-3702-42ad-a217-1d527ae7c8af" containerID="be0a469ad5d2a8f7d0440f53db36ba2a93cf54b4571daa9469addb3b926291e1" exitCode=0 Sep 29 17:12:28 crc kubenswrapper[4592]: I0929 17:12:28.293234 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" event={"ID":"df77059a-3702-42ad-a217-1d527ae7c8af","Type":"ContainerDied","Data":"be0a469ad5d2a8f7d0440f53db36ba2a93cf54b4571daa9469addb3b926291e1"} Sep 29 17:12:28 crc kubenswrapper[4592]: I0929 17:12:28.502590 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-gn5bh"] Sep 29 17:12:29 crc kubenswrapper[4592]: I0929 17:12:29.304848 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" event={"ID":"df77059a-3702-42ad-a217-1d527ae7c8af","Type":"ContainerStarted","Data":"865ad78aaaaf4f6d704ff809f1247aa134c6100d27d8cf665bb7f00e2be4af28"} Sep 29 17:12:29 crc kubenswrapper[4592]: I0929 17:12:29.305250 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:29 crc kubenswrapper[4592]: I0929 17:12:29.310838 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-gn5bh" event={"ID":"feab94f4-b3ec-465b-8d59-22643f853dc8","Type":"ContainerStarted","Data":"f52982daad45aae0770d61f69be5335ba029b80378ba5c632387ba50df28b73c"} Sep 29 17:12:29 crc kubenswrapper[4592]: I0929 17:12:29.310879 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-gn5bh" event={"ID":"feab94f4-b3ec-465b-8d59-22643f853dc8","Type":"ContainerStarted","Data":"84309058ce7f67972c5c8d89b9509b55513342ab6e85de316bd6902cae63d9b0"} Sep 29 17:12:29 crc kubenswrapper[4592]: I0929 17:12:29.329881 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" podStartSLOduration=4.329860283 podStartE2EDuration="4.329860283s" podCreationTimestamp="2025-09-29 17:12:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:12:29.325920785 +0000 UTC m=+1279.473698466" watchObservedRunningTime="2025-09-29 17:12:29.329860283 +0000 UTC m=+1279.477637964" Sep 29 17:12:29 crc kubenswrapper[4592]: I0929 17:12:29.346413 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-gn5bh" podStartSLOduration=2.346398349 podStartE2EDuration="2.346398349s" podCreationTimestamp="2025-09-29 17:12:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:12:29.34100682 +0000 UTC m=+1279.488784501" watchObservedRunningTime="2025-09-29 17:12:29.346398349 +0000 UTC m=+1279.494176030" Sep 29 17:12:30 crc kubenswrapper[4592]: I0929 17:12:30.882785 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:12:30 crc kubenswrapper[4592]: I0929 17:12:30.883134 4592 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:12:30 crc kubenswrapper[4592]: I0929 17:12:30.883198 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 17:12:30 crc kubenswrapper[4592]: I0929 17:12:30.884017 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"eda311cdba216e737acbcd0597b515cd95b73924e8324b693474a342758766fb"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 17:12:30 crc kubenswrapper[4592]: I0929 17:12:30.884090 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://eda311cdba216e737acbcd0597b515cd95b73924e8324b693474a342758766fb" gracePeriod=600 Sep 29 17:12:31 crc kubenswrapper[4592]: I0929 17:12:31.033941 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:31 crc kubenswrapper[4592]: I0929 17:12:31.080857 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 17:12:31 crc kubenswrapper[4592]: I0929 17:12:31.350536 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="eda311cdba216e737acbcd0597b515cd95b73924e8324b693474a342758766fb" exitCode=0 Sep 29 17:12:31 crc kubenswrapper[4592]: I0929 17:12:31.350749 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"eda311cdba216e737acbcd0597b515cd95b73924e8324b693474a342758766fb"} Sep 29 17:12:31 crc kubenswrapper[4592]: I0929 17:12:31.350937 4592 scope.go:117] "RemoveContainer" containerID="d3bcef6cdb62fe4e0e330bc04d7fcf2a1a90ac24ed21caa15b239bee09c268e1" Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.173741 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.360162 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2eb51866-0f80-4027-a1bd-1519aee01031","Type":"ContainerStarted","Data":"73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a"} Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.364426 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3bcf8349-e3ac-4b67-a167-fadd65b18307","Type":"ContainerStarted","Data":"428d148676fb88bfa46aa41445590e6dbea748b4c5e1f83a1e3649890ebfbbe5"} Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.364491 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"3bcf8349-e3ac-4b67-a167-fadd65b18307","Type":"ContainerStarted","Data":"378aee20856f944412f2afaeba4c867d23cba3ace3badde6bc3a475bfd570173"} Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.366420 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1","Type":"ContainerStarted","Data":"ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1"} Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.366466 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1","Type":"ContainerStarted","Data":"a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26"} Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.366589 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" containerName="nova-metadata-log" containerID="cri-o://a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26" gracePeriod=30 Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.366714 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" containerName="nova-metadata-metadata" containerID="cri-o://ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1" gracePeriod=30 Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.374043 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"6ba93e7083930b491deeabf3f5e5e00bbbacfa31695ad5f283ea89667e717859"} Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.378813 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e","Type":"ContainerStarted","Data":"bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758"} Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.378955 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758" gracePeriod=30 Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.400209 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.119635141 podStartE2EDuration="7.400183483s" podCreationTimestamp="2025-09-29 17:12:25 +0000 UTC" firstStartedPulling="2025-09-29 17:12:26.999783741 +0000 UTC m=+1277.147561422" lastFinishedPulling="2025-09-29 17:12:31.280332083 +0000 UTC m=+1281.428109764" observedRunningTime="2025-09-29 17:12:32.387875064 +0000 UTC m=+1282.535652745" watchObservedRunningTime="2025-09-29 17:12:32.400183483 +0000 UTC m=+1282.547961184" Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.418807 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.9043547629999997 podStartE2EDuration="7.418789927s" podCreationTimestamp="2025-09-29 17:12:25 +0000 UTC" firstStartedPulling="2025-09-29 17:12:26.789875851 +0000 UTC m=+1276.937653532" lastFinishedPulling="2025-09-29 17:12:31.304311015 +0000 UTC m=+1281.452088696" 
observedRunningTime="2025-09-29 17:12:32.414125218 +0000 UTC m=+1282.561902899" watchObservedRunningTime="2025-09-29 17:12:32.418789927 +0000 UTC m=+1282.566567608" Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.442189 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.085863699 podStartE2EDuration="7.442173071s" podCreationTimestamp="2025-09-29 17:12:25 +0000 UTC" firstStartedPulling="2025-09-29 17:12:26.922580482 +0000 UTC m=+1277.070358163" lastFinishedPulling="2025-09-29 17:12:31.278889854 +0000 UTC m=+1281.426667535" observedRunningTime="2025-09-29 17:12:32.437944955 +0000 UTC m=+1282.585722636" watchObservedRunningTime="2025-09-29 17:12:32.442173071 +0000 UTC m=+1282.589950742" Sep 29 17:12:32 crc kubenswrapper[4592]: I0929 17:12:32.521614 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.174731331 podStartE2EDuration="7.521594162s" podCreationTimestamp="2025-09-29 17:12:25 +0000 UTC" firstStartedPulling="2025-09-29 17:12:26.958430351 +0000 UTC m=+1277.106208032" lastFinishedPulling="2025-09-29 17:12:31.305293182 +0000 UTC m=+1281.453070863" observedRunningTime="2025-09-29 17:12:32.485543328 +0000 UTC m=+1282.633321039" watchObservedRunningTime="2025-09-29 17:12:32.521594162 +0000 UTC m=+1282.669371843" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.006078 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.066962 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-config-data\") pod \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.067180 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-logs\") pod \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.067287 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ws5s2\" (UniqueName: \"kubernetes.io/projected/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-kube-api-access-ws5s2\") pod \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.067796 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-logs" (OuterVolumeSpecName: "logs") pod "f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" (UID: "f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.067327 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-combined-ca-bundle\") pod \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.068376 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.078367 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-kube-api-access-ws5s2" (OuterVolumeSpecName: "kube-api-access-ws5s2") pod "f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" (UID: "f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1"). InnerVolumeSpecName "kube-api-access-ws5s2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:12:33 crc kubenswrapper[4592]: E0929 17:12:33.097418 4592 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-combined-ca-bundle podName:f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1 nodeName:}" failed. No retries permitted until 2025-09-29 17:12:33.597391014 +0000 UTC m=+1283.745168695 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-combined-ca-bundle") pod "f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" (UID: "f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1") : error deleting /var/lib/kubelet/pods/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1/volume-subpaths: remove /var/lib/kubelet/pods/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1/volume-subpaths: no such file or directory Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.101394 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-config-data" (OuterVolumeSpecName: "config-data") pod "f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" (UID: "f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.176810 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ws5s2\" (UniqueName: \"kubernetes.io/projected/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-kube-api-access-ws5s2\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.176842 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.389137 4592 generic.go:334] "Generic (PLEG): container finished" podID="f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" containerID="ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1" exitCode=0 Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.389463 4592 generic.go:334] "Generic (PLEG): container finished" podID="f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" containerID="a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26" exitCode=143 Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.389206 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1","Type":"ContainerDied","Data":"ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1"} Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.389529 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1","Type":"ContainerDied","Data":"a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26"} Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.389557 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1","Type":"ContainerDied","Data":"134ed776d0bd5435ba539c5639fb2f3873d8527e0ab23cdbb8aa56c6ab1d3ad3"} Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.389577 4592 scope.go:117] "RemoveContainer" containerID="ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.389260 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.409255 4592 scope.go:117] "RemoveContainer" containerID="a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.428520 4592 scope.go:117] "RemoveContainer" containerID="ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1" Sep 29 17:12:33 crc kubenswrapper[4592]: E0929 17:12:33.428904 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1\": container with ID starting with ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1 not found: ID does not exist" containerID="ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.428950 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1"} err="failed to get container status \"ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1\": rpc error: code = NotFound desc = could not find container \"ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1\": container with ID starting with ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1 not found: ID does not exist" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.428980 4592 scope.go:117] "RemoveContainer" containerID="a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26" Sep 29 17:12:33 crc kubenswrapper[4592]: E0929 17:12:33.429336 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26\": container with ID starting with a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26 not found: ID does not exist" containerID="a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.429373 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26"} err="failed to get container status \"a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26\": rpc error: code = NotFound desc = could not find container \"a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26\": container with ID starting with a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26 not found: ID does not exist" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.429401 4592 scope.go:117] "RemoveContainer" containerID="ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.429651 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1"} err="failed to get container status \"ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1\": rpc error: code = NotFound desc = could not find container \"ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1\": container with ID starting with ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1 not found: ID does not exist" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.429676 4592 
scope.go:117] "RemoveContainer" containerID="a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.429915 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26"} err="failed to get container status \"a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26\": rpc error: code = NotFound desc = could not find container \"a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26\": container with ID starting with a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26 not found: ID does not exist" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.685416 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-combined-ca-bundle\") pod \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\" (UID: \"f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1\") " Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.697934 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" (UID: "f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:33 crc kubenswrapper[4592]: I0929 17:12:33.787215 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.030849 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.043011 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.065472 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:34 crc kubenswrapper[4592]: E0929 17:12:34.065936 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" containerName="nova-metadata-log" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.065956 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" containerName="nova-metadata-log" Sep 29 17:12:34 crc kubenswrapper[4592]: E0929 17:12:34.065982 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" containerName="nova-metadata-metadata" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.065991 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" containerName="nova-metadata-metadata" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.066277 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" containerName="nova-metadata-log" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.066298 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" containerName="nova-metadata-metadata" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.067346 4592 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.070900 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.071479 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.106498 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.192497 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-config-data\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.193187 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fca89221-1609-444a-80cf-a7526f91a407-logs\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.193317 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.193448 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.193561 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8k9n\" (UniqueName: \"kubernetes.io/projected/fca89221-1609-444a-80cf-a7526f91a407-kube-api-access-r8k9n\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.295361 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.295459 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8k9n\" (UniqueName: \"kubernetes.io/projected/fca89221-1609-444a-80cf-a7526f91a407-kube-api-access-r8k9n\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.295521 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-config-data\") pod \"nova-metadata-0\" (UID: 
\"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.295562 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fca89221-1609-444a-80cf-a7526f91a407-logs\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.295642 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.297055 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fca89221-1609-444a-80cf-a7526f91a407-logs\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.301898 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-config-data\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.302055 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.305782 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.314011 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8k9n\" (UniqueName: \"kubernetes.io/projected/fca89221-1609-444a-80cf-a7526f91a407-kube-api-access-r8k9n\") pod \"nova-metadata-0\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.386901 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:12:34 crc kubenswrapper[4592]: I0929 17:12:34.934402 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:35 crc kubenswrapper[4592]: I0929 17:12:35.197098 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1" path="/var/lib/kubelet/pods/f69f0ce5-b54d-4f2a-854a-8e9e62d7efb1/volumes" Sep 29 17:12:35 crc kubenswrapper[4592]: I0929 17:12:35.411418 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fca89221-1609-444a-80cf-a7526f91a407","Type":"ContainerStarted","Data":"98829f86e15e8b4efb2cab2a6ce0ea8796c21bc4044e99d113bb9a0bd27a1182"} Sep 29 17:12:35 crc kubenswrapper[4592]: I0929 17:12:35.411697 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fca89221-1609-444a-80cf-a7526f91a407","Type":"ContainerStarted","Data":"c85808fceb0a02e5dc9ba730a92429a2e20315f055c5c795f4836e02a5d240c1"} Sep 29 17:12:35 crc kubenswrapper[4592]: I0929 17:12:35.411711 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fca89221-1609-444a-80cf-a7526f91a407","Type":"ContainerStarted","Data":"1c27f0e26a138cc8f98bbe28a049d27ad7cbf94709c1613e0f2f796f11bf7c40"} Sep 29 17:12:35 crc kubenswrapper[4592]: I0929 17:12:35.440775 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.4407511720000001 podStartE2EDuration="1.440751172s" podCreationTimestamp="2025-09-29 17:12:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:12:35.431937279 +0000 UTC m=+1285.579714990" watchObservedRunningTime="2025-09-29 17:12:35.440751172 +0000 UTC m=+1285.588528863" Sep 29 17:12:35 crc kubenswrapper[4592]: I0929 17:12:35.874697 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 17:12:35 crc kubenswrapper[4592]: I0929 17:12:35.874784 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.095248 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.095299 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.123236 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.186726 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.279957 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.390432 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-94fbn"] Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.390841 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" podUID="6ddab092-5ab2-420d-8d8f-30ce7633185d" containerName="dnsmasq-dns" 
containerID="cri-o://8b717471f9d3e22895367f6382947e0627642aa75f7b3dfec674d3ebf781a549" gracePeriod=10 Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.514738 4592 generic.go:334] "Generic (PLEG): container finished" podID="9b018763-574c-4186-9191-3342af9acbf3" containerID="96d505e5f0c53c8ca54d8a0fc72ef170f89636e7265d441f402abbfb1430dbd8" exitCode=0 Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.514963 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-pcfkx" event={"ID":"9b018763-574c-4186-9191-3342af9acbf3","Type":"ContainerDied","Data":"96d505e5f0c53c8ca54d8a0fc72ef170f89636e7265d441f402abbfb1430dbd8"} Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.785847 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.853438 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" podUID="6ddab092-5ab2-420d-8d8f-30ce7633185d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.161:5353: connect: connection refused" Sep 29 17:12:36 crc kubenswrapper[4592]: W0929 17:12:36.908237 4592 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf69f0ce5_b54d_4f2a_854a_8e9e62d7efb1.slice/crio-134ed776d0bd5435ba539c5639fb2f3873d8527e0ab23cdbb8aa56c6ab1d3ad3": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf69f0ce5_b54d_4f2a_854a_8e9e62d7efb1.slice/crio-134ed776d0bd5435ba539c5639fb2f3873d8527e0ab23cdbb8aa56c6ab1d3ad3: no such file or directory Sep 29 17:12:36 crc kubenswrapper[4592]: W0929 17:12:36.908985 4592 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf77059a_3702_42ad_a217_1d527ae7c8af.slice/crio-conmon-be0a469ad5d2a8f7d0440f53db36ba2a93cf54b4571daa9469addb3b926291e1.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf77059a_3702_42ad_a217_1d527ae7c8af.slice/crio-conmon-be0a469ad5d2a8f7d0440f53db36ba2a93cf54b4571daa9469addb3b926291e1.scope: no such file or directory Sep 29 17:12:36 crc kubenswrapper[4592]: W0929 17:12:36.909188 4592 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf77059a_3702_42ad_a217_1d527ae7c8af.slice/crio-be0a469ad5d2a8f7d0440f53db36ba2a93cf54b4571daa9469addb3b926291e1.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf77059a_3702_42ad_a217_1d527ae7c8af.slice/crio-be0a469ad5d2a8f7d0440f53db36ba2a93cf54b4571daa9469addb3b926291e1.scope: no such file or directory Sep 29 17:12:36 crc kubenswrapper[4592]: W0929 17:12:36.916378 4592 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf69f0ce5_b54d_4f2a_854a_8e9e62d7efb1.slice/crio-conmon-a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf69f0ce5_b54d_4f2a_854a_8e9e62d7efb1.slice/crio-conmon-a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26.scope: no such file or directory Sep 
29 17:12:36 crc kubenswrapper[4592]: W0929 17:12:36.931460 4592 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf69f0ce5_b54d_4f2a_854a_8e9e62d7efb1.slice/crio-a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf69f0ce5_b54d_4f2a_854a_8e9e62d7efb1.slice/crio-a1d0e58da58e37d3fd2940387eaeea2757d0590d938a7a91a6881ad95c294a26.scope: no such file or directory Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.963355 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3bcf8349-e3ac-4b67-a167-fadd65b18307" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 17:12:36 crc kubenswrapper[4592]: I0929 17:12:36.963688 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3bcf8349-e3ac-4b67-a167-fadd65b18307" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.474983 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.537935 4592 generic.go:334] "Generic (PLEG): container finished" podID="5e1b0183-5638-42d4-85fc-498f16b43305" containerID="8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf" exitCode=137 Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.538011 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e1b0183-5638-42d4-85fc-498f16b43305","Type":"ContainerDied","Data":"8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf"} Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.538043 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e1b0183-5638-42d4-85fc-498f16b43305","Type":"ContainerDied","Data":"dff91f3b5c370ff4ff696b39ccd2a30f4397320b7be25e39421bfc9e070604e0"} Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.538065 4592 scope.go:117] "RemoveContainer" containerID="8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.538294 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.546754 4592 generic.go:334] "Generic (PLEG): container finished" podID="6ddab092-5ab2-420d-8d8f-30ce7633185d" containerID="8b717471f9d3e22895367f6382947e0627642aa75f7b3dfec674d3ebf781a549" exitCode=0 Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.546981 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" event={"ID":"6ddab092-5ab2-420d-8d8f-30ce7633185d","Type":"ContainerDied","Data":"8b717471f9d3e22895367f6382947e0627642aa75f7b3dfec674d3ebf781a549"} Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.580258 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-config-data\") pod \"5e1b0183-5638-42d4-85fc-498f16b43305\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.580407 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khj9m\" (UniqueName: \"kubernetes.io/projected/5e1b0183-5638-42d4-85fc-498f16b43305-kube-api-access-khj9m\") pod \"5e1b0183-5638-42d4-85fc-498f16b43305\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.580434 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-scripts\") pod \"5e1b0183-5638-42d4-85fc-498f16b43305\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.580458 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e1b0183-5638-42d4-85fc-498f16b43305-log-httpd\") pod \"5e1b0183-5638-42d4-85fc-498f16b43305\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.580489 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-sg-core-conf-yaml\") pod \"5e1b0183-5638-42d4-85fc-498f16b43305\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.580513 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-combined-ca-bundle\") pod \"5e1b0183-5638-42d4-85fc-498f16b43305\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.580550 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e1b0183-5638-42d4-85fc-498f16b43305-run-httpd\") pod \"5e1b0183-5638-42d4-85fc-498f16b43305\" (UID: \"5e1b0183-5638-42d4-85fc-498f16b43305\") " Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.581658 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e1b0183-5638-42d4-85fc-498f16b43305-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5e1b0183-5638-42d4-85fc-498f16b43305" (UID: "5e1b0183-5638-42d4-85fc-498f16b43305"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.581968 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e1b0183-5638-42d4-85fc-498f16b43305-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5e1b0183-5638-42d4-85fc-498f16b43305" (UID: "5e1b0183-5638-42d4-85fc-498f16b43305"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.600228 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-scripts" (OuterVolumeSpecName: "scripts") pod "5e1b0183-5638-42d4-85fc-498f16b43305" (UID: "5e1b0183-5638-42d4-85fc-498f16b43305"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.616278 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e1b0183-5638-42d4-85fc-498f16b43305-kube-api-access-khj9m" (OuterVolumeSpecName: "kube-api-access-khj9m") pod "5e1b0183-5638-42d4-85fc-498f16b43305" (UID: "5e1b0183-5638-42d4-85fc-498f16b43305"). InnerVolumeSpecName "kube-api-access-khj9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.706831 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khj9m\" (UniqueName: \"kubernetes.io/projected/5e1b0183-5638-42d4-85fc-498f16b43305-kube-api-access-khj9m\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.706869 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.706883 4592 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e1b0183-5638-42d4-85fc-498f16b43305-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.706894 4592 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e1b0183-5638-42d4-85fc-498f16b43305-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.724257 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5e1b0183-5638-42d4-85fc-498f16b43305" (UID: "5e1b0183-5638-42d4-85fc-498f16b43305"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.814079 4592 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.884416 4592 scope.go:117] "RemoveContainer" containerID="bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.887339 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-config-data" (OuterVolumeSpecName: "config-data") pod "5e1b0183-5638-42d4-85fc-498f16b43305" (UID: "5e1b0183-5638-42d4-85fc-498f16b43305"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.888787 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.926475 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.964713 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e1b0183-5638-42d4-85fc-498f16b43305" (UID: "5e1b0183-5638-42d4-85fc-498f16b43305"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:37 crc kubenswrapper[4592]: I0929 17:12:37.966892 4592 scope.go:117] "RemoveContainer" containerID="c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.001379 4592 scope.go:117] "RemoveContainer" containerID="8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.029774 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-ovsdbserver-nb\") pod \"6ddab092-5ab2-420d-8d8f-30ce7633185d\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.029875 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-dns-svc\") pod \"6ddab092-5ab2-420d-8d8f-30ce7633185d\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.029914 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-dns-swift-storage-0\") pod \"6ddab092-5ab2-420d-8d8f-30ce7633185d\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.029934 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzb9k\" (UniqueName: \"kubernetes.io/projected/6ddab092-5ab2-420d-8d8f-30ce7633185d-kube-api-access-zzb9k\") pod 
\"6ddab092-5ab2-420d-8d8f-30ce7633185d\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.029968 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-ovsdbserver-sb\") pod \"6ddab092-5ab2-420d-8d8f-30ce7633185d\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.030012 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-config\") pod \"6ddab092-5ab2-420d-8d8f-30ce7633185d\" (UID: \"6ddab092-5ab2-420d-8d8f-30ce7633185d\") " Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.030381 4592 scope.go:117] "RemoveContainer" containerID="8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.030501 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e1b0183-5638-42d4-85fc-498f16b43305-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:38 crc kubenswrapper[4592]: E0929 17:12:38.032825 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf\": container with ID starting with 8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf not found: ID does not exist" containerID="8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.032885 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf"} err="failed to get container status \"8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf\": rpc error: code = NotFound desc = could not find container \"8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf\": container with ID starting with 8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf not found: ID does not exist" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.032910 4592 scope.go:117] "RemoveContainer" containerID="bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748" Sep 29 17:12:38 crc kubenswrapper[4592]: E0929 17:12:38.036421 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748\": container with ID starting with bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748 not found: ID does not exist" containerID="bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.036460 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748"} err="failed to get container status \"bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748\": rpc error: code = NotFound desc = could not find container \"bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748\": container with ID starting with bb288cb66337d156b85873476d9bda3c6e4735c0eb2caf32d3087adba4030748 not found: ID does not exist" Sep 29 17:12:38 crc 
kubenswrapper[4592]: I0929 17:12:38.036481 4592 scope.go:117] "RemoveContainer" containerID="c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a" Sep 29 17:12:38 crc kubenswrapper[4592]: E0929 17:12:38.036746 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a\": container with ID starting with c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a not found: ID does not exist" containerID="c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.036828 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a"} err="failed to get container status \"c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a\": rpc error: code = NotFound desc = could not find container \"c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a\": container with ID starting with c9a6e5398d8342330d725755a152a53ae3dae60a62bb8b5cedc0fc3702e2af0a not found: ID does not exist" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.036856 4592 scope.go:117] "RemoveContainer" containerID="8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b" Sep 29 17:12:38 crc kubenswrapper[4592]: E0929 17:12:38.037355 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b\": container with ID starting with 8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b not found: ID does not exist" containerID="8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.037378 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b"} err="failed to get container status \"8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b\": rpc error: code = NotFound desc = could not find container \"8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b\": container with ID starting with 8135898132ce416b508baaab4b493948ad867190950c02589a98e99d2643257b not found: ID does not exist" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.067264 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ddab092-5ab2-420d-8d8f-30ce7633185d-kube-api-access-zzb9k" (OuterVolumeSpecName: "kube-api-access-zzb9k") pod "6ddab092-5ab2-420d-8d8f-30ce7633185d" (UID: "6ddab092-5ab2-420d-8d8f-30ce7633185d"). InnerVolumeSpecName "kube-api-access-zzb9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.092181 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.102720 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-config" (OuterVolumeSpecName: "config") pod "6ddab092-5ab2-420d-8d8f-30ce7633185d" (UID: "6ddab092-5ab2-420d-8d8f-30ce7633185d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.114048 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6ddab092-5ab2-420d-8d8f-30ce7633185d" (UID: "6ddab092-5ab2-420d-8d8f-30ce7633185d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.120711 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6ddab092-5ab2-420d-8d8f-30ce7633185d" (UID: "6ddab092-5ab2-420d-8d8f-30ce7633185d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.133330 4592 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.133364 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzb9k\" (UniqueName: \"kubernetes.io/projected/6ddab092-5ab2-420d-8d8f-30ce7633185d-kube-api-access-zzb9k\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.133376 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.133384 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.135831 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6ddab092-5ab2-420d-8d8f-30ce7633185d" (UID: "6ddab092-5ab2-420d-8d8f-30ce7633185d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.146816 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6ddab092-5ab2-420d-8d8f-30ce7633185d" (UID: "6ddab092-5ab2-420d-8d8f-30ce7633185d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.184879 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.208040 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.225417 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:12:38 crc kubenswrapper[4592]: E0929 17:12:38.225838 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ddab092-5ab2-420d-8d8f-30ce7633185d" containerName="init" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.225853 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ddab092-5ab2-420d-8d8f-30ce7633185d" containerName="init" Sep 29 17:12:38 crc kubenswrapper[4592]: E0929 17:12:38.225867 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="ceilometer-notification-agent" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.225873 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="ceilometer-notification-agent" Sep 29 17:12:38 crc kubenswrapper[4592]: E0929 17:12:38.225888 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="ceilometer-central-agent" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.225893 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="ceilometer-central-agent" Sep 29 17:12:38 crc kubenswrapper[4592]: E0929 17:12:38.225903 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ddab092-5ab2-420d-8d8f-30ce7633185d" containerName="dnsmasq-dns" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.225908 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ddab092-5ab2-420d-8d8f-30ce7633185d" containerName="dnsmasq-dns" Sep 29 17:12:38 crc kubenswrapper[4592]: E0929 17:12:38.225921 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="sg-core" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.225927 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="sg-core" Sep 29 17:12:38 crc kubenswrapper[4592]: E0929 17:12:38.225940 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="proxy-httpd" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.225947 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="proxy-httpd" Sep 29 17:12:38 crc kubenswrapper[4592]: E0929 17:12:38.225967 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b018763-574c-4186-9191-3342af9acbf3" containerName="nova-manage" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.225973 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b018763-574c-4186-9191-3342af9acbf3" containerName="nova-manage" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.226124 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="ceilometer-notification-agent" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.226161 4592 
memory_manager.go:354] "RemoveStaleState removing state" podUID="9b018763-574c-4186-9191-3342af9acbf3" containerName="nova-manage" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.226170 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="sg-core" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.226185 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ddab092-5ab2-420d-8d8f-30ce7633185d" containerName="dnsmasq-dns" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.226193 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="proxy-httpd" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.226203 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" containerName="ceilometer-central-agent" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.228412 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.234979 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.235962 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-config-data\") pod \"9b018763-574c-4186-9191-3342af9acbf3\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.236198 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-scripts\") pod \"9b018763-574c-4186-9191-3342af9acbf3\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.236329 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqhfv\" (UniqueName: \"kubernetes.io/projected/9b018763-574c-4186-9191-3342af9acbf3-kube-api-access-jqhfv\") pod \"9b018763-574c-4186-9191-3342af9acbf3\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.236503 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-combined-ca-bundle\") pod \"9b018763-574c-4186-9191-3342af9acbf3\" (UID: \"9b018763-574c-4186-9191-3342af9acbf3\") " Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.236756 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.245646 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.245926 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6ddab092-5ab2-420d-8d8f-30ce7633185d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.253824 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:12:38 crc kubenswrapper[4592]: 
I0929 17:12:38.262817 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-scripts" (OuterVolumeSpecName: "scripts") pod "9b018763-574c-4186-9191-3342af9acbf3" (UID: "9b018763-574c-4186-9191-3342af9acbf3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.263375 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b018763-574c-4186-9191-3342af9acbf3-kube-api-access-jqhfv" (OuterVolumeSpecName: "kube-api-access-jqhfv") pod "9b018763-574c-4186-9191-3342af9acbf3" (UID: "9b018763-574c-4186-9191-3342af9acbf3"). InnerVolumeSpecName "kube-api-access-jqhfv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.305345 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-config-data" (OuterVolumeSpecName: "config-data") pod "9b018763-574c-4186-9191-3342af9acbf3" (UID: "9b018763-574c-4186-9191-3342af9acbf3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.311928 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b018763-574c-4186-9191-3342af9acbf3" (UID: "9b018763-574c-4186-9191-3342af9acbf3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.347577 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg2rx\" (UniqueName: \"kubernetes.io/projected/927c9cca-f27a-43fc-a67b-0cf398001a9d-kube-api-access-cg2rx\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.347670 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/927c9cca-f27a-43fc-a67b-0cf398001a9d-log-httpd\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.347727 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-config-data\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.347749 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-scripts\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.347824 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " 
pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.347852 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.347938 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/927c9cca-f27a-43fc-a67b-0cf398001a9d-run-httpd\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.348225 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.348245 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.348253 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqhfv\" (UniqueName: \"kubernetes.io/projected/9b018763-574c-4186-9191-3342af9acbf3-kube-api-access-jqhfv\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.348263 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b018763-574c-4186-9191-3342af9acbf3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.450512 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg2rx\" (UniqueName: \"kubernetes.io/projected/927c9cca-f27a-43fc-a67b-0cf398001a9d-kube-api-access-cg2rx\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.450567 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/927c9cca-f27a-43fc-a67b-0cf398001a9d-log-httpd\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.450593 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-config-data\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.450612 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-scripts\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.450674 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.450697 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.450733 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/927c9cca-f27a-43fc-a67b-0cf398001a9d-run-httpd\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.450980 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/927c9cca-f27a-43fc-a67b-0cf398001a9d-log-httpd\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.451322 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/927c9cca-f27a-43fc-a67b-0cf398001a9d-run-httpd\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.456071 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-scripts\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.456367 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.457187 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.459064 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-config-data\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.481798 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg2rx\" (UniqueName: \"kubernetes.io/projected/927c9cca-f27a-43fc-a67b-0cf398001a9d-kube-api-access-cg2rx\") pod \"ceilometer-0\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.562614 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" event={"ID":"6ddab092-5ab2-420d-8d8f-30ce7633185d","Type":"ContainerDied","Data":"c297e483e0fa50e5069a8e02c70ce98db4453a244f987554faba79eafeafb03c"} Sep 29 17:12:38 crc kubenswrapper[4592]: 
I0929 17:12:38.562668 4592 scope.go:117] "RemoveContainer" containerID="8b717471f9d3e22895367f6382947e0627642aa75f7b3dfec674d3ebf781a549" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.562781 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-94fbn" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.570521 4592 generic.go:334] "Generic (PLEG): container finished" podID="feab94f4-b3ec-465b-8d59-22643f853dc8" containerID="f52982daad45aae0770d61f69be5335ba029b80378ba5c632387ba50df28b73c" exitCode=0 Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.570763 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-gn5bh" event={"ID":"feab94f4-b3ec-465b-8d59-22643f853dc8","Type":"ContainerDied","Data":"f52982daad45aae0770d61f69be5335ba029b80378ba5c632387ba50df28b73c"} Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.574649 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.582860 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-pcfkx" event={"ID":"9b018763-574c-4186-9191-3342af9acbf3","Type":"ContainerDied","Data":"0143fe9a7abb32f99f0a0235e584c6f3e0084d323a51da62cfaa1d09bc0f535d"} Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.583076 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0143fe9a7abb32f99f0a0235e584c6f3e0084d323a51da62cfaa1d09bc0f535d" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.583365 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-pcfkx" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.668268 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-94fbn"] Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.684883 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-94fbn"] Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.698940 4592 scope.go:117] "RemoveContainer" containerID="ca1704287963a5c83e54fa0e1b419f11f34980053ebd6be3dab83dfef69bded2" Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.774305 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.774532 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3bcf8349-e3ac-4b67-a167-fadd65b18307" containerName="nova-api-log" containerID="cri-o://378aee20856f944412f2afaeba4c867d23cba3ace3badde6bc3a475bfd570173" gracePeriod=30 Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.774671 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3bcf8349-e3ac-4b67-a167-fadd65b18307" containerName="nova-api-api" containerID="cri-o://428d148676fb88bfa46aa41445590e6dbea748b4c5e1f83a1e3649890ebfbbe5" gracePeriod=30 Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.789993 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.790621 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="2eb51866-0f80-4027-a1bd-1519aee01031" containerName="nova-scheduler-scheduler" 
containerID="cri-o://73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a" gracePeriod=30 Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.825394 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.825599 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fca89221-1609-444a-80cf-a7526f91a407" containerName="nova-metadata-log" containerID="cri-o://c85808fceb0a02e5dc9ba730a92429a2e20315f055c5c795f4836e02a5d240c1" gracePeriod=30 Sep 29 17:12:38 crc kubenswrapper[4592]: I0929 17:12:38.825727 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="fca89221-1609-444a-80cf-a7526f91a407" containerName="nova-metadata-metadata" containerID="cri-o://98829f86e15e8b4efb2cab2a6ce0ea8796c21bc4044e99d113bb9a0bd27a1182" gracePeriod=30 Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.195079 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e1b0183-5638-42d4-85fc-498f16b43305" path="/var/lib/kubelet/pods/5e1b0183-5638-42d4-85fc-498f16b43305/volumes" Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.196593 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ddab092-5ab2-420d-8d8f-30ce7633185d" path="/var/lib/kubelet/pods/6ddab092-5ab2-420d-8d8f-30ce7633185d/volumes" Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.282272 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.288024 4592 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.387363 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.387403 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.622820 4592 generic.go:334] "Generic (PLEG): container finished" podID="fca89221-1609-444a-80cf-a7526f91a407" containerID="98829f86e15e8b4efb2cab2a6ce0ea8796c21bc4044e99d113bb9a0bd27a1182" exitCode=0 Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.622854 4592 generic.go:334] "Generic (PLEG): container finished" podID="fca89221-1609-444a-80cf-a7526f91a407" containerID="c85808fceb0a02e5dc9ba730a92429a2e20315f055c5c795f4836e02a5d240c1" exitCode=143 Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.622923 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fca89221-1609-444a-80cf-a7526f91a407","Type":"ContainerDied","Data":"98829f86e15e8b4efb2cab2a6ce0ea8796c21bc4044e99d113bb9a0bd27a1182"} Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.622952 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fca89221-1609-444a-80cf-a7526f91a407","Type":"ContainerDied","Data":"c85808fceb0a02e5dc9ba730a92429a2e20315f055c5c795f4836e02a5d240c1"} Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.643518 4592 generic.go:334] "Generic (PLEG): container finished" podID="3bcf8349-e3ac-4b67-a167-fadd65b18307" containerID="378aee20856f944412f2afaeba4c867d23cba3ace3badde6bc3a475bfd570173" exitCode=143 Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.643626 4592 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3bcf8349-e3ac-4b67-a167-fadd65b18307","Type":"ContainerDied","Data":"378aee20856f944412f2afaeba4c867d23cba3ace3badde6bc3a475bfd570173"} Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.650390 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"927c9cca-f27a-43fc-a67b-0cf398001a9d","Type":"ContainerStarted","Data":"c8f68b6408abd45e19115a4597fb4b8420f63b58a57ed35cd2a359fef7e0e740"} Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.737628 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.892901 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fca89221-1609-444a-80cf-a7526f91a407-logs\") pod \"fca89221-1609-444a-80cf-a7526f91a407\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.893492 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fca89221-1609-444a-80cf-a7526f91a407-logs" (OuterVolumeSpecName: "logs") pod "fca89221-1609-444a-80cf-a7526f91a407" (UID: "fca89221-1609-444a-80cf-a7526f91a407"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.893555 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-config-data\") pod \"fca89221-1609-444a-80cf-a7526f91a407\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.893664 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-nova-metadata-tls-certs\") pod \"fca89221-1609-444a-80cf-a7526f91a407\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.893706 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-combined-ca-bundle\") pod \"fca89221-1609-444a-80cf-a7526f91a407\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.893778 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8k9n\" (UniqueName: \"kubernetes.io/projected/fca89221-1609-444a-80cf-a7526f91a407-kube-api-access-r8k9n\") pod \"fca89221-1609-444a-80cf-a7526f91a407\" (UID: \"fca89221-1609-444a-80cf-a7526f91a407\") " Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.894305 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fca89221-1609-444a-80cf-a7526f91a407-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.907340 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fca89221-1609-444a-80cf-a7526f91a407-kube-api-access-r8k9n" (OuterVolumeSpecName: "kube-api-access-r8k9n") pod "fca89221-1609-444a-80cf-a7526f91a407" (UID: "fca89221-1609-444a-80cf-a7526f91a407"). InnerVolumeSpecName "kube-api-access-r8k9n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:12:39 crc kubenswrapper[4592]: E0929 17:12:39.944855 4592 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/3dfb21f9aee32e5006ec8f6e57aeb9dafe143e15f6ff851e5758dcedccfd4052/diff" to get inode usage: stat /var/lib/containers/storage/overlay/3dfb21f9aee32e5006ec8f6e57aeb9dafe143e15f6ff851e5758dcedccfd4052/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack_dnsmasq-dns-6bb4fc677f-94fbn_6ddab092-5ab2-420d-8d8f-30ce7633185d/dnsmasq-dns/0.log" to get inode usage: stat /var/log/pods/openstack_dnsmasq-dns-6bb4fc677f-94fbn_6ddab092-5ab2-420d-8d8f-30ce7633185d/dnsmasq-dns/0.log: no such file or directory Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.960837 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-config-data" (OuterVolumeSpecName: "config-data") pod "fca89221-1609-444a-80cf-a7526f91a407" (UID: "fca89221-1609-444a-80cf-a7526f91a407"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.974516 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fca89221-1609-444a-80cf-a7526f91a407" (UID: "fca89221-1609-444a-80cf-a7526f91a407"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.995760 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.995875 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:39 crc kubenswrapper[4592]: I0929 17:12:39.995888 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8k9n\" (UniqueName: \"kubernetes.io/projected/fca89221-1609-444a-80cf-a7526f91a407-kube-api-access-r8k9n\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.020397 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "fca89221-1609-444a-80cf-a7526f91a407" (UID: "fca89221-1609-444a-80cf-a7526f91a407"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.097050 4592 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/fca89221-1609-444a-80cf-a7526f91a407-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.141991 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.198458 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-combined-ca-bundle\") pod \"feab94f4-b3ec-465b-8d59-22643f853dc8\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.198578 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-scripts\") pod \"feab94f4-b3ec-465b-8d59-22643f853dc8\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.198638 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvgg9\" (UniqueName: \"kubernetes.io/projected/feab94f4-b3ec-465b-8d59-22643f853dc8-kube-api-access-tvgg9\") pod \"feab94f4-b3ec-465b-8d59-22643f853dc8\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.198701 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-config-data\") pod \"feab94f4-b3ec-465b-8d59-22643f853dc8\" (UID: \"feab94f4-b3ec-465b-8d59-22643f853dc8\") " Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.211022 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/feab94f4-b3ec-465b-8d59-22643f853dc8-kube-api-access-tvgg9" (OuterVolumeSpecName: "kube-api-access-tvgg9") pod "feab94f4-b3ec-465b-8d59-22643f853dc8" (UID: "feab94f4-b3ec-465b-8d59-22643f853dc8"). InnerVolumeSpecName "kube-api-access-tvgg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.215289 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-scripts" (OuterVolumeSpecName: "scripts") pod "feab94f4-b3ec-465b-8d59-22643f853dc8" (UID: "feab94f4-b3ec-465b-8d59-22643f853dc8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.268216 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "feab94f4-b3ec-465b-8d59-22643f853dc8" (UID: "feab94f4-b3ec-465b-8d59-22643f853dc8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.276312 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.282909 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-config-data" (OuterVolumeSpecName: "config-data") pod "feab94f4-b3ec-465b-8d59-22643f853dc8" (UID: "feab94f4-b3ec-465b-8d59-22643f853dc8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.301383 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.301434 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvgg9\" (UniqueName: \"kubernetes.io/projected/feab94f4-b3ec-465b-8d59-22643f853dc8-kube-api-access-tvgg9\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.301449 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.301460 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feab94f4-b3ec-465b-8d59-22643f853dc8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.402162 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2eb51866-0f80-4027-a1bd-1519aee01031-config-data\") pod \"2eb51866-0f80-4027-a1bd-1519aee01031\" (UID: \"2eb51866-0f80-4027-a1bd-1519aee01031\") " Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.402234 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkrwc\" (UniqueName: \"kubernetes.io/projected/2eb51866-0f80-4027-a1bd-1519aee01031-kube-api-access-gkrwc\") pod \"2eb51866-0f80-4027-a1bd-1519aee01031\" (UID: \"2eb51866-0f80-4027-a1bd-1519aee01031\") " Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.402374 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eb51866-0f80-4027-a1bd-1519aee01031-combined-ca-bundle\") pod \"2eb51866-0f80-4027-a1bd-1519aee01031\" (UID: \"2eb51866-0f80-4027-a1bd-1519aee01031\") " Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.417336 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2eb51866-0f80-4027-a1bd-1519aee01031-kube-api-access-gkrwc" (OuterVolumeSpecName: "kube-api-access-gkrwc") pod "2eb51866-0f80-4027-a1bd-1519aee01031" (UID: "2eb51866-0f80-4027-a1bd-1519aee01031"). InnerVolumeSpecName "kube-api-access-gkrwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.428095 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2eb51866-0f80-4027-a1bd-1519aee01031-config-data" (OuterVolumeSpecName: "config-data") pod "2eb51866-0f80-4027-a1bd-1519aee01031" (UID: "2eb51866-0f80-4027-a1bd-1519aee01031"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.437684 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2eb51866-0f80-4027-a1bd-1519aee01031-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2eb51866-0f80-4027-a1bd-1519aee01031" (UID: "2eb51866-0f80-4027-a1bd-1519aee01031"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.505078 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkrwc\" (UniqueName: \"kubernetes.io/projected/2eb51866-0f80-4027-a1bd-1519aee01031-kube-api-access-gkrwc\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.505116 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2eb51866-0f80-4027-a1bd-1519aee01031-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.505125 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2eb51866-0f80-4027-a1bd-1519aee01031-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.680869 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 17:12:40 crc kubenswrapper[4592]: E0929 17:12:40.681536 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fca89221-1609-444a-80cf-a7526f91a407" containerName="nova-metadata-log" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.681553 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="fca89221-1609-444a-80cf-a7526f91a407" containerName="nova-metadata-log" Sep 29 17:12:40 crc kubenswrapper[4592]: E0929 17:12:40.681576 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2eb51866-0f80-4027-a1bd-1519aee01031" containerName="nova-scheduler-scheduler" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.681583 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="2eb51866-0f80-4027-a1bd-1519aee01031" containerName="nova-scheduler-scheduler" Sep 29 17:12:40 crc kubenswrapper[4592]: E0929 17:12:40.681592 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fca89221-1609-444a-80cf-a7526f91a407" containerName="nova-metadata-metadata" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.681598 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="fca89221-1609-444a-80cf-a7526f91a407" containerName="nova-metadata-metadata" Sep 29 17:12:40 crc kubenswrapper[4592]: E0929 17:12:40.681625 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feab94f4-b3ec-465b-8d59-22643f853dc8" containerName="nova-cell1-conductor-db-sync" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.681632 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="feab94f4-b3ec-465b-8d59-22643f853dc8" containerName="nova-cell1-conductor-db-sync" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.681798 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="feab94f4-b3ec-465b-8d59-22643f853dc8" containerName="nova-cell1-conductor-db-sync" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.681812 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="fca89221-1609-444a-80cf-a7526f91a407" containerName="nova-metadata-log" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.681827 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="2eb51866-0f80-4027-a1bd-1519aee01031" containerName="nova-scheduler-scheduler" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.681840 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="fca89221-1609-444a-80cf-a7526f91a407" containerName="nova-metadata-metadata" Sep 29 17:12:40 crc 
kubenswrapper[4592]: I0929 17:12:40.682438 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.702003 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-gn5bh" event={"ID":"feab94f4-b3ec-465b-8d59-22643f853dc8","Type":"ContainerDied","Data":"84309058ce7f67972c5c8d89b9509b55513342ab6e85de316bd6902cae63d9b0"} Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.702041 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84309058ce7f67972c5c8d89b9509b55513342ab6e85de316bd6902cae63d9b0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.702098 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-gn5bh" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.708857 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.710342 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"fca89221-1609-444a-80cf-a7526f91a407","Type":"ContainerDied","Data":"1c27f0e26a138cc8f98bbe28a049d27ad7cbf94709c1613e0f2f796f11bf7c40"} Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.710449 4592 scope.go:117] "RemoveContainer" containerID="98829f86e15e8b4efb2cab2a6ce0ea8796c21bc4044e99d113bb9a0bd27a1182" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.710609 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.729879 4592 generic.go:334] "Generic (PLEG): container finished" podID="2eb51866-0f80-4027-a1bd-1519aee01031" containerID="73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a" exitCode=0 Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.730016 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.730330 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2eb51866-0f80-4027-a1bd-1519aee01031","Type":"ContainerDied","Data":"73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a"} Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.730366 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"2eb51866-0f80-4027-a1bd-1519aee01031","Type":"ContainerDied","Data":"9e52ee17dfa8e4bc32e60d8770d51fdea37859f4ff02871230346cd208a7b984"} Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.733392 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"927c9cca-f27a-43fc-a67b-0cf398001a9d","Type":"ContainerStarted","Data":"7e6701a8aad1b44dc13f8a04622e2ed13cfcd60bfd7b798e732d2b9f860fee40"} Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.774371 4592 scope.go:117] "RemoveContainer" containerID="c85808fceb0a02e5dc9ba730a92429a2e20315f055c5c795f4836e02a5d240c1" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.785114 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.805569 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.815934 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb359aaf-6eae-40d2-a14e-3a7a47e3a286-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"bb359aaf-6eae-40d2-a14e-3a7a47e3a286\") " pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.816104 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djttz\" (UniqueName: \"kubernetes.io/projected/bb359aaf-6eae-40d2-a14e-3a7a47e3a286-kube-api-access-djttz\") pod \"nova-cell1-conductor-0\" (UID: \"bb359aaf-6eae-40d2-a14e-3a7a47e3a286\") " pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.816245 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb359aaf-6eae-40d2-a14e-3a7a47e3a286-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"bb359aaf-6eae-40d2-a14e-3a7a47e3a286\") " pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.834535 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.838302 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.846712 4592 scope.go:117] "RemoveContainer" containerID="73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.846727 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.846879 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.879701 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.887208 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.895990 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.900414 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.902091 4592 scope.go:117] "RemoveContainer" containerID="73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.903533 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:12:40 crc kubenswrapper[4592]: E0929 17:12:40.905883 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a\": container with ID starting with 73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a not found: ID does not exist" containerID="73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.905929 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a"} err="failed to get container status \"73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a\": rpc error: code = NotFound desc = could not find container \"73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a\": container with ID starting with 73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a not found: ID does not exist" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.911252 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.912568 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.917190 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.917344 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.917401 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb359aaf-6eae-40d2-a14e-3a7a47e3a286-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"bb359aaf-6eae-40d2-a14e-3a7a47e3a286\") " pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.917453 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djttz\" (UniqueName: \"kubernetes.io/projected/bb359aaf-6eae-40d2-a14e-3a7a47e3a286-kube-api-access-djttz\") pod \"nova-cell1-conductor-0\" (UID: \"bb359aaf-6eae-40d2-a14e-3a7a47e3a286\") " pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.917481 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.917541 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb359aaf-6eae-40d2-a14e-3a7a47e3a286-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"bb359aaf-6eae-40d2-a14e-3a7a47e3a286\") " pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.917572 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbdsj\" (UniqueName: \"kubernetes.io/projected/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-kube-api-access-gbdsj\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.917606 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-config-data\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.917619 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-logs\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.925006 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb359aaf-6eae-40d2-a14e-3a7a47e3a286-config-data\") pod 
\"nova-cell1-conductor-0\" (UID: \"bb359aaf-6eae-40d2-a14e-3a7a47e3a286\") " pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.927005 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb359aaf-6eae-40d2-a14e-3a7a47e3a286-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"bb359aaf-6eae-40d2-a14e-3a7a47e3a286\") " pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.932804 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:12:40 crc kubenswrapper[4592]: I0929 17:12:40.941901 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djttz\" (UniqueName: \"kubernetes.io/projected/bb359aaf-6eae-40d2-a14e-3a7a47e3a286-kube-api-access-djttz\") pod \"nova-cell1-conductor-0\" (UID: \"bb359aaf-6eae-40d2-a14e-3a7a47e3a286\") " pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.012798 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.019313 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.019605 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbdsj\" (UniqueName: \"kubernetes.io/projected/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-kube-api-access-gbdsj\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.019715 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.019805 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-logs\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.019873 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-config-data\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.019963 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-config-data\") pod \"nova-scheduler-0\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.020042 4592 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.020128 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j52z8\" (UniqueName: \"kubernetes.io/projected/f0935c62-3820-41b3-afaf-2e8417804197-kube-api-access-j52z8\") pod \"nova-scheduler-0\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.024114 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-logs\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.026066 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.026577 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.029730 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-config-data\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.043624 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbdsj\" (UniqueName: \"kubernetes.io/projected/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-kube-api-access-gbdsj\") pod \"nova-metadata-0\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " pod="openstack/nova-metadata-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.124055 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.124093 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-config-data\") pod \"nova-scheduler-0\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.124131 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j52z8\" (UniqueName: \"kubernetes.io/projected/f0935c62-3820-41b3-afaf-2e8417804197-kube-api-access-j52z8\") pod \"nova-scheduler-0\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:41 crc 
kubenswrapper[4592]: I0929 17:12:41.140694 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-config-data\") pod \"nova-scheduler-0\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.142945 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.169243 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j52z8\" (UniqueName: \"kubernetes.io/projected/f0935c62-3820-41b3-afaf-2e8417804197-kube-api-access-j52z8\") pod \"nova-scheduler-0\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " pod="openstack/nova-scheduler-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.208101 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.276679 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.321167 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2eb51866-0f80-4027-a1bd-1519aee01031" path="/var/lib/kubelet/pods/2eb51866-0f80-4027-a1bd-1519aee01031/volumes" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.321827 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fca89221-1609-444a-80cf-a7526f91a407" path="/var/lib/kubelet/pods/fca89221-1609-444a-80cf-a7526f91a407/volumes" Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.539566 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.751616 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"bb359aaf-6eae-40d2-a14e-3a7a47e3a286","Type":"ContainerStarted","Data":"80a28a58bcfef6355a4701c26d859b212769fb79c858ba423c1950141f089a8b"} Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.756105 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"927c9cca-f27a-43fc-a67b-0cf398001a9d","Type":"ContainerStarted","Data":"1484b5d3077673fc809902af54809499782a7fa919400cbc44e7b131ac5d2fbe"} Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.845522 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:12:41 crc kubenswrapper[4592]: I0929 17:12:41.948296 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:12:42 crc kubenswrapper[4592]: I0929 17:12:42.783279 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4","Type":"ContainerStarted","Data":"ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d"} Sep 29 17:12:42 crc kubenswrapper[4592]: I0929 17:12:42.783597 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4","Type":"ContainerStarted","Data":"bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b"} Sep 29 17:12:42 crc kubenswrapper[4592]: I0929 17:12:42.783607 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4","Type":"ContainerStarted","Data":"ef87b82d2fe1ea1fc61e57f2a9f63f7946661a7b68761745082022d352c1790b"} Sep 29 17:12:42 crc kubenswrapper[4592]: I0929 17:12:42.787000 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"bb359aaf-6eae-40d2-a14e-3a7a47e3a286","Type":"ContainerStarted","Data":"82e8770014201e3abd784e25640a257b9dac83a1c7d58394ef4ddafc4aa911ae"} Sep 29 17:12:42 crc kubenswrapper[4592]: I0929 17:12:42.788323 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:42 crc kubenswrapper[4592]: I0929 17:12:42.791340 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"927c9cca-f27a-43fc-a67b-0cf398001a9d","Type":"ContainerStarted","Data":"ee788493b370795efc6a1158b5ec50f2596d7a4f0f829d9b31581415b707d7c5"} Sep 29 17:12:42 crc kubenswrapper[4592]: I0929 17:12:42.793173 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f0935c62-3820-41b3-afaf-2e8417804197","Type":"ContainerStarted","Data":"e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399"} Sep 29 17:12:42 crc kubenswrapper[4592]: I0929 17:12:42.793209 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f0935c62-3820-41b3-afaf-2e8417804197","Type":"ContainerStarted","Data":"bb5456da38301913aa5d7b74531709d3079f7fb13166c7bb2b2098ab7df1c46d"} Sep 29 17:12:42 crc kubenswrapper[4592]: I0929 17:12:42.808027 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.808012277 podStartE2EDuration="2.808012277s" podCreationTimestamp="2025-09-29 17:12:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:12:42.806946008 +0000 UTC m=+1292.954723689" watchObservedRunningTime="2025-09-29 17:12:42.808012277 +0000 UTC m=+1292.955789958" Sep 29 17:12:42 crc kubenswrapper[4592]: I0929 17:12:42.839358 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.839342672 podStartE2EDuration="2.839342672s" podCreationTimestamp="2025-09-29 17:12:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:12:42.834380145 +0000 UTC m=+1292.982157846" watchObservedRunningTime="2025-09-29 17:12:42.839342672 +0000 UTC m=+1292.987120353" Sep 29 17:12:43 crc kubenswrapper[4592]: I0929 17:12:43.791869 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:12:43 crc kubenswrapper[4592]: I0929 17:12:43.804082 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"927c9cca-f27a-43fc-a67b-0cf398001a9d","Type":"ContainerStarted","Data":"21e50874edd4c09a7f869b409acf1dc0f4db0aa91b07f1de3637b67c02524077"} Sep 29 17:12:43 crc kubenswrapper[4592]: I0929 17:12:43.804410 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/ceilometer-0" Sep 29 17:12:43 crc kubenswrapper[4592]: I0929 17:12:43.814263 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=3.814246123 podStartE2EDuration="3.814246123s" podCreationTimestamp="2025-09-29 17:12:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:12:42.860463114 +0000 UTC m=+1293.008240785" watchObservedRunningTime="2025-09-29 17:12:43.814246123 +0000 UTC m=+1293.962023804" Sep 29 17:12:43 crc kubenswrapper[4592]: I0929 17:12:43.858521 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-749bb4c784-lnncs" Sep 29 17:12:43 crc kubenswrapper[4592]: I0929 17:12:43.870662 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.12601904 podStartE2EDuration="5.870645559s" podCreationTimestamp="2025-09-29 17:12:38 +0000 UTC" firstStartedPulling="2025-09-29 17:12:39.287823768 +0000 UTC m=+1289.435601449" lastFinishedPulling="2025-09-29 17:12:43.032450297 +0000 UTC m=+1293.180227968" observedRunningTime="2025-09-29 17:12:43.860420196 +0000 UTC m=+1294.008197877" watchObservedRunningTime="2025-09-29 17:12:43.870645559 +0000 UTC m=+1294.018423240" Sep 29 17:12:43 crc kubenswrapper[4592]: I0929 17:12:43.939391 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-c9567f99b-8nh47"] Sep 29 17:12:43 crc kubenswrapper[4592]: I0929 17:12:43.939703 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon-log" containerID="cri-o://c044f7528ceed902d66d79d1f760ee4728348faeae6cb231d66d1c9544900e03" gracePeriod=30 Sep 29 17:12:43 crc kubenswrapper[4592]: I0929 17:12:43.940227 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" containerID="cri-o://8e464d652fc134a4e820c67f002eb02e3dca835f203b4f1858bc3d8b584b6796" gracePeriod=30 Sep 29 17:12:45 crc kubenswrapper[4592]: I0929 17:12:45.822289 4592 generic.go:334] "Generic (PLEG): container finished" podID="3bcf8349-e3ac-4b67-a167-fadd65b18307" containerID="428d148676fb88bfa46aa41445590e6dbea748b4c5e1f83a1e3649890ebfbbe5" exitCode=0 Sep 29 17:12:45 crc kubenswrapper[4592]: I0929 17:12:45.823289 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3bcf8349-e3ac-4b67-a167-fadd65b18307","Type":"ContainerDied","Data":"428d148676fb88bfa46aa41445590e6dbea748b4c5e1f83a1e3649890ebfbbe5"} Sep 29 17:12:45 crc kubenswrapper[4592]: I0929 17:12:45.823975 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3bcf8349-e3ac-4b67-a167-fadd65b18307","Type":"ContainerDied","Data":"ae81174309766f61ba31d249ecbf9414a1344d46b6bf761b50b90c9a3343b7c3"} Sep 29 17:12:45 crc kubenswrapper[4592]: I0929 17:12:45.824052 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae81174309766f61ba31d249ecbf9414a1344d46b6bf761b50b90c9a3343b7c3" Sep 29 17:12:45 crc kubenswrapper[4592]: I0929 17:12:45.881687 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 17:12:45 crc kubenswrapper[4592]: I0929 17:12:45.950181 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sp4dn\" (UniqueName: \"kubernetes.io/projected/3bcf8349-e3ac-4b67-a167-fadd65b18307-kube-api-access-sp4dn\") pod \"3bcf8349-e3ac-4b67-a167-fadd65b18307\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " Sep 29 17:12:45 crc kubenswrapper[4592]: I0929 17:12:45.950597 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bcf8349-e3ac-4b67-a167-fadd65b18307-combined-ca-bundle\") pod \"3bcf8349-e3ac-4b67-a167-fadd65b18307\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " Sep 29 17:12:45 crc kubenswrapper[4592]: I0929 17:12:45.950758 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3bcf8349-e3ac-4b67-a167-fadd65b18307-logs\") pod \"3bcf8349-e3ac-4b67-a167-fadd65b18307\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " Sep 29 17:12:45 crc kubenswrapper[4592]: I0929 17:12:45.950890 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bcf8349-e3ac-4b67-a167-fadd65b18307-config-data\") pod \"3bcf8349-e3ac-4b67-a167-fadd65b18307\" (UID: \"3bcf8349-e3ac-4b67-a167-fadd65b18307\") " Sep 29 17:12:45 crc kubenswrapper[4592]: I0929 17:12:45.952886 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bcf8349-e3ac-4b67-a167-fadd65b18307-logs" (OuterVolumeSpecName: "logs") pod "3bcf8349-e3ac-4b67-a167-fadd65b18307" (UID: "3bcf8349-e3ac-4b67-a167-fadd65b18307"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:12:45 crc kubenswrapper[4592]: I0929 17:12:45.972459 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bcf8349-e3ac-4b67-a167-fadd65b18307-kube-api-access-sp4dn" (OuterVolumeSpecName: "kube-api-access-sp4dn") pod "3bcf8349-e3ac-4b67-a167-fadd65b18307" (UID: "3bcf8349-e3ac-4b67-a167-fadd65b18307"). InnerVolumeSpecName "kube-api-access-sp4dn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:12:45 crc kubenswrapper[4592]: I0929 17:12:45.995496 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bcf8349-e3ac-4b67-a167-fadd65b18307-config-data" (OuterVolumeSpecName: "config-data") pod "3bcf8349-e3ac-4b67-a167-fadd65b18307" (UID: "3bcf8349-e3ac-4b67-a167-fadd65b18307"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.004715 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3bcf8349-e3ac-4b67-a167-fadd65b18307-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3bcf8349-e3ac-4b67-a167-fadd65b18307" (UID: "3bcf8349-e3ac-4b67-a167-fadd65b18307"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.047611 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.056357 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3bcf8349-e3ac-4b67-a167-fadd65b18307-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.056393 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sp4dn\" (UniqueName: \"kubernetes.io/projected/3bcf8349-e3ac-4b67-a167-fadd65b18307-kube-api-access-sp4dn\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.056405 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3bcf8349-e3ac-4b67-a167-fadd65b18307-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.056414 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3bcf8349-e3ac-4b67-a167-fadd65b18307-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.208355 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.208982 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.277705 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.834011 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.912822 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.936911 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.941186 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 17:12:46 crc kubenswrapper[4592]: E0929 17:12:46.941611 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bcf8349-e3ac-4b67-a167-fadd65b18307" containerName="nova-api-log" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.941634 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bcf8349-e3ac-4b67-a167-fadd65b18307" containerName="nova-api-log" Sep 29 17:12:46 crc kubenswrapper[4592]: E0929 17:12:46.941670 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bcf8349-e3ac-4b67-a167-fadd65b18307" containerName="nova-api-api" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.941676 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bcf8349-e3ac-4b67-a167-fadd65b18307" containerName="nova-api-api" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.941891 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bcf8349-e3ac-4b67-a167-fadd65b18307" containerName="nova-api-log" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.941908 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bcf8349-e3ac-4b67-a167-fadd65b18307" containerName="nova-api-api" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.942856 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.945763 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.946669 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.977647 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9621cde-29f2-4903-8bf8-9a002c5b6d45-config-data\") pod \"nova-api-0\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " pod="openstack/nova-api-0" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.977747 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmwt4\" (UniqueName: \"kubernetes.io/projected/c9621cde-29f2-4903-8bf8-9a002c5b6d45-kube-api-access-jmwt4\") pod \"nova-api-0\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " pod="openstack/nova-api-0" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.977803 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9621cde-29f2-4903-8bf8-9a002c5b6d45-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " pod="openstack/nova-api-0" Sep 29 17:12:46 crc kubenswrapper[4592]: I0929 17:12:46.977836 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9621cde-29f2-4903-8bf8-9a002c5b6d45-logs\") pod \"nova-api-0\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " pod="openstack/nova-api-0" Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.079127 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9621cde-29f2-4903-8bf8-9a002c5b6d45-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " pod="openstack/nova-api-0" Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.079200 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9621cde-29f2-4903-8bf8-9a002c5b6d45-logs\") pod \"nova-api-0\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " pod="openstack/nova-api-0" Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.079252 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9621cde-29f2-4903-8bf8-9a002c5b6d45-config-data\") pod \"nova-api-0\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " pod="openstack/nova-api-0" Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.079323 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmwt4\" (UniqueName: \"kubernetes.io/projected/c9621cde-29f2-4903-8bf8-9a002c5b6d45-kube-api-access-jmwt4\") pod \"nova-api-0\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " pod="openstack/nova-api-0" Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.079665 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9621cde-29f2-4903-8bf8-9a002c5b6d45-logs\") pod \"nova-api-0\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " 
pod="openstack/nova-api-0" Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.085881 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9621cde-29f2-4903-8bf8-9a002c5b6d45-config-data\") pod \"nova-api-0\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " pod="openstack/nova-api-0" Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.086007 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9621cde-29f2-4903-8bf8-9a002c5b6d45-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " pod="openstack/nova-api-0" Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.114219 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmwt4\" (UniqueName: \"kubernetes.io/projected/c9621cde-29f2-4903-8bf8-9a002c5b6d45-kube-api-access-jmwt4\") pod \"nova-api-0\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " pod="openstack/nova-api-0" Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.193305 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bcf8349-e3ac-4b67-a167-fadd65b18307" path="/var/lib/kubelet/pods/3bcf8349-e3ac-4b67-a167-fadd65b18307/volumes" Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.266552 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.386280 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:60804->10.217.0.145:8443: read: connection reset by peer" Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.799619 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.851674 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c9621cde-29f2-4903-8bf8-9a002c5b6d45","Type":"ContainerStarted","Data":"f747e76761b36e7907ae0512147d61fbdf5db991ac507d01422f634de36ba57c"} Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.855477 4592 generic.go:334] "Generic (PLEG): container finished" podID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerID="8e464d652fc134a4e820c67f002eb02e3dca835f203b4f1858bc3d8b584b6796" exitCode=0 Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.855537 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c9567f99b-8nh47" event={"ID":"de56880e-c3e2-46db-b63d-c46acd0f6e1f","Type":"ContainerDied","Data":"8e464d652fc134a4e820c67f002eb02e3dca835f203b4f1858bc3d8b584b6796"} Sep 29 17:12:47 crc kubenswrapper[4592]: I0929 17:12:47.855571 4592 scope.go:117] "RemoveContainer" containerID="cda228aa120a90f351fae823ce96a1ef649a815a578fb0163561c02a9e5cf929" Sep 29 17:12:48 crc kubenswrapper[4592]: I0929 17:12:48.866909 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c9621cde-29f2-4903-8bf8-9a002c5b6d45","Type":"ContainerStarted","Data":"3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99"} Sep 29 17:12:48 crc kubenswrapper[4592]: I0929 17:12:48.868252 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"c9621cde-29f2-4903-8bf8-9a002c5b6d45","Type":"ContainerStarted","Data":"3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b"} Sep 29 17:12:48 crc kubenswrapper[4592]: I0929 17:12:48.895854 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.895833422 podStartE2EDuration="2.895833422s" podCreationTimestamp="2025-09-29 17:12:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:12:48.886348666 +0000 UTC m=+1299.034126357" watchObservedRunningTime="2025-09-29 17:12:48.895833422 +0000 UTC m=+1299.043611103" Sep 29 17:12:51 crc kubenswrapper[4592]: I0929 17:12:51.208646 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 17:12:51 crc kubenswrapper[4592]: I0929 17:12:51.209303 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Sep 29 17:12:51 crc kubenswrapper[4592]: I0929 17:12:51.277397 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Sep 29 17:12:51 crc kubenswrapper[4592]: I0929 17:12:51.303489 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Sep 29 17:12:51 crc kubenswrapper[4592]: I0929 17:12:51.954084 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Sep 29 17:12:52 crc kubenswrapper[4592]: I0929 17:12:52.223371 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 17:12:52 crc kubenswrapper[4592]: I0929 17:12:52.223371 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Sep 29 17:12:56 crc kubenswrapper[4592]: I0929 17:12:56.312941 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 17:12:57 crc kubenswrapper[4592]: I0929 17:12:57.266986 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 17:12:57 crc kubenswrapper[4592]: I0929 17:12:57.267068 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 17:12:58 crc kubenswrapper[4592]: I0929 17:12:58.350360 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 17:12:58 crc kubenswrapper[4592]: I0929 17:12:58.350663 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.197:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Sep 29 17:13:01 crc kubenswrapper[4592]: I0929 17:13:01.216976 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 17:13:01 crc kubenswrapper[4592]: I0929 17:13:01.219141 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Sep 29 17:13:01 crc kubenswrapper[4592]: I0929 17:13:01.223849 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 17:13:02 crc kubenswrapper[4592]: I0929 17:13:02.040439 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Sep 29 17:13:02 crc kubenswrapper[4592]: W0929 17:13:02.436754 4592 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf69f0ce5_b54d_4f2a_854a_8e9e62d7efb1.slice/crio-conmon-ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf69f0ce5_b54d_4f2a_854a_8e9e62d7efb1.slice/crio-conmon-ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1.scope: no such file or directory Sep 29 17:13:02 crc kubenswrapper[4592]: W0929 17:13:02.437009 4592 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf69f0ce5_b54d_4f2a_854a_8e9e62d7efb1.slice/crio-ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf69f0ce5_b54d_4f2a_854a_8e9e62d7efb1.slice/crio-ef063d5aaee70ce4b7e47b5a773fdee1017e34097ad35df3308ee0e3381bffb1.scope: no such file or directory Sep 29 17:13:02 crc kubenswrapper[4592]: W0929 17:13:02.437028 4592 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3bcf8349_e3ac_4b67_a167_fadd65b18307.slice/crio-conmon-428d148676fb88bfa46aa41445590e6dbea748b4c5e1f83a1e3649890ebfbbe5.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3bcf8349_e3ac_4b67_a167_fadd65b18307.slice/crio-conmon-428d148676fb88bfa46aa41445590e6dbea748b4c5e1f83a1e3649890ebfbbe5.scope: no such file or directory Sep 29 17:13:02 crc kubenswrapper[4592]: W0929 17:13:02.437043 4592 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3bcf8349_e3ac_4b67_a167_fadd65b18307.slice/crio-428d148676fb88bfa46aa41445590e6dbea748b4c5e1f83a1e3649890ebfbbe5.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3bcf8349_e3ac_4b67_a167_fadd65b18307.slice/crio-428d148676fb88bfa46aa41445590e6dbea748b4c5e1f83a1e3649890ebfbbe5.scope: no such file or directory Sep 29 17:13:02 crc kubenswrapper[4592]: W0929 17:13:02.439352 4592 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfca89221_1609_444a_80cf_a7526f91a407.slice": 0x40000100 == 
IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfca89221_1609_444a_80cf_a7526f91a407.slice: no such file or directory Sep 29 17:13:02 crc kubenswrapper[4592]: W0929 17:13:02.451139 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2eb51866_0f80_4027_a1bd_1519aee01031.slice/crio-73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a.scope WatchSource:0}: Error finding container 73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a: Status 404 returned error can't find the container with id 73c1981412be6776731c8407bd675a507ff6cb901c44defb340d0db27dffc78a Sep 29 17:13:02 crc kubenswrapper[4592]: E0929 17:13:02.722854 4592 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e1b0183_5638_42d4_85fc_498f16b43305.slice/crio-dff91f3b5c370ff4ff696b39ccd2a30f4397320b7be25e39421bfc9e070604e0\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b018763_574c_4186_9191_3342af9acbf3.slice/crio-96d505e5f0c53c8ca54d8a0fc72ef170f89636e7265d441f402abbfb1430dbd8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf69f0ce5_b54d_4f2a_854a_8e9e62d7efb1.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b018763_574c_4186_9191_3342af9acbf3.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ddab092_5ab2_420d_8d8f_30ce7633185d.slice/crio-8b717471f9d3e22895367f6382947e0627642aa75f7b3dfec674d3ebf781a549.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b018763_574c_4186_9191_3342af9acbf3.slice/crio-conmon-96d505e5f0c53c8ca54d8a0fc72ef170f89636e7265d441f402abbfb1430dbd8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b018763_574c_4186_9191_3342af9acbf3.slice/crio-0143fe9a7abb32f99f0a0235e584c6f3e0084d323a51da62cfaa1d09bc0f535d\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e1b0183_5638_42d4_85fc_498f16b43305.slice/crio-conmon-8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3bcf8349_e3ac_4b67_a167_fadd65b18307.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e1b0183_5638_42d4_85fc_498f16b43305.slice/crio-8699b1755564f78c548d890ef7a0ec16bfb85d0e8fe36c7e014d6335f37953bf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde56880e_c3e2_46db_b63d_c46acd0f6e1f.slice/crio-conmon-8e464d652fc134a4e820c67f002eb02e3dca835f203b4f1858bc3d8b584b6796.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ddab092_5ab2_420d_8d8f_30ce7633185d.slice/crio-c297e483e0fa50e5069a8e02c70ce98db4453a244f987554faba79eafeafb03c\": RecentStats: unable to find 
data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e1b0183_5638_42d4_85fc_498f16b43305.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4cc986fa_6620_43ff_ae05_11c71e326035.slice/crio-eda311cdba216e737acbcd0597b515cd95b73924e8324b693474a342758766fb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ddab092_5ab2_420d_8d8f_30ce7633185d.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde56880e_c3e2_46db_b63d_c46acd0f6e1f.slice/crio-8e464d652fc134a4e820c67f002eb02e3dca835f203b4f1858bc3d8b584b6796.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3bcf8349_e3ac_4b67_a167_fadd65b18307.slice/crio-ae81174309766f61ba31d249ecbf9414a1344d46b6bf761b50b90c9a3343b7c3\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4cc986fa_6620_43ff_ae05_11c71e326035.slice/crio-conmon-eda311cdba216e737acbcd0597b515cd95b73924e8324b693474a342758766fb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2eb51866_0f80_4027_a1bd_1519aee01031.slice\": RecentStats: unable to find data in memory cache]" Sep 29 17:13:02 crc kubenswrapper[4592]: I0929 17:13:02.813515 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:02 crc kubenswrapper[4592]: I0929 17:13:02.891741 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-config-data\") pod \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\" (UID: \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\") " Sep 29 17:13:02 crc kubenswrapper[4592]: I0929 17:13:02.891824 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b49d2\" (UniqueName: \"kubernetes.io/projected/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-kube-api-access-b49d2\") pod \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\" (UID: \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\") " Sep 29 17:13:02 crc kubenswrapper[4592]: I0929 17:13:02.891843 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-combined-ca-bundle\") pod \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\" (UID: \"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e\") " Sep 29 17:13:02 crc kubenswrapper[4592]: I0929 17:13:02.907476 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-kube-api-access-b49d2" (OuterVolumeSpecName: "kube-api-access-b49d2") pod "d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e" (UID: "d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e"). InnerVolumeSpecName "kube-api-access-b49d2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:13:02 crc kubenswrapper[4592]: I0929 17:13:02.920812 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e" (UID: "d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:02 crc kubenswrapper[4592]: I0929 17:13:02.921822 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-config-data" (OuterVolumeSpecName: "config-data") pod "d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e" (UID: "d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:02 crc kubenswrapper[4592]: I0929 17:13:02.993590 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:02 crc kubenswrapper[4592]: I0929 17:13:02.993624 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b49d2\" (UniqueName: \"kubernetes.io/projected/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-kube-api-access-b49d2\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:02 crc kubenswrapper[4592]: I0929 17:13:02.993634 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.043316 4592 generic.go:334] "Generic (PLEG): container finished" podID="d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e" containerID="bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758" exitCode=137 Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.043380 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.043374 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e","Type":"ContainerDied","Data":"bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758"} Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.043435 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e","Type":"ContainerDied","Data":"1adc9af4376b2e2dc18190abbd782ffe586aacfd3b9a4c39e46494bfe3ce5940"} Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.043457 4592 scope.go:117] "RemoveContainer" containerID="bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.075896 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.079736 4592 scope.go:117] "RemoveContainer" containerID="bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758" Sep 29 17:13:03 crc kubenswrapper[4592]: E0929 17:13:03.080132 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758\": container with ID starting with bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758 not found: ID does not exist" containerID="bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.080199 4592 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758"} err="failed to get container status \"bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758\": rpc error: code = NotFound desc = could not find container \"bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758\": container with ID starting with bdcaa6bcece34ba3950616774bf8e5402c032ebe79a6609ce9ea042fc3c24758 not found: ID does not exist" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.085968 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.113629 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 17:13:03 crc kubenswrapper[4592]: E0929 17:13:03.114305 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.114338 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.114658 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e" containerName="nova-cell1-novncproxy-novncproxy" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.115723 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.118000 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.118544 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.119107 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.156608 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.194922 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e" path="/var/lib/kubelet/pods/d6a825f4-1ebc-4dee-a6f9-cdb94ecf370e/volumes" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.200636 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.200747 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.200779 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.200934 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.201026 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9wbx\" (UniqueName: \"kubernetes.io/projected/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-kube-api-access-n9wbx\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.302547 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.302595 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.302676 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.302712 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9wbx\" (UniqueName: \"kubernetes.io/projected/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-kube-api-access-n9wbx\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.302774 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.306170 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.306812 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.308667 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.310588 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.319936 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9wbx\" (UniqueName: \"kubernetes.io/projected/a998c4c4-de7e-4c25-b2c3-87d54e3b9e56-kube-api-access-n9wbx\") pod \"nova-cell1-novncproxy-0\" (UID: \"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56\") " pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.464456 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:03 crc kubenswrapper[4592]: I0929 17:13:03.939757 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Sep 29 17:13:04 crc kubenswrapper[4592]: I0929 17:13:04.052056 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56","Type":"ContainerStarted","Data":"8234ddba53fc7dc88efc0adcc49758469d2f5cbc0371f3f9490bbe5fb13cee1b"} Sep 29 17:13:05 crc kubenswrapper[4592]: I0929 17:13:05.069690 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a998c4c4-de7e-4c25-b2c3-87d54e3b9e56","Type":"ContainerStarted","Data":"235d05142d69b50e52a3840682a0431c7f4ed2d6f9e2dff7624e537f06edefad"} Sep 29 17:13:05 crc kubenswrapper[4592]: I0929 17:13:05.103700 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.103672992 podStartE2EDuration="2.103672992s" podCreationTimestamp="2025-09-29 17:13:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:13:05.095220353 +0000 UTC m=+1315.242998114" watchObservedRunningTime="2025-09-29 17:13:05.103672992 +0000 UTC m=+1315.251450693" Sep 29 17:13:06 crc kubenswrapper[4592]: I0929 17:13:06.312805 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-c9567f99b-8nh47" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Sep 29 17:13:06 crc kubenswrapper[4592]: I0929 17:13:06.313426 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:13:07 crc kubenswrapper[4592]: I0929 17:13:07.270634 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/nova-api-0" Sep 29 17:13:07 crc kubenswrapper[4592]: I0929 17:13:07.271259 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 17:13:07 crc kubenswrapper[4592]: I0929 17:13:07.271773 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Sep 29 17:13:07 crc kubenswrapper[4592]: I0929 17:13:07.274704 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.100437 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.103893 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.322465 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-qkvjb"] Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.324701 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.348174 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-qkvjb"] Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.405689 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.405739 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc66s\" (UniqueName: \"kubernetes.io/projected/6330cf10-7696-474e-b294-e7f668d89c34-kube-api-access-pc66s\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.405771 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-config\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.405818 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.405892 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.405931 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.465899 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.509753 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.509863 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.509921 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.509950 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc66s\" (UniqueName: \"kubernetes.io/projected/6330cf10-7696-474e-b294-e7f668d89c34-kube-api-access-pc66s\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.509975 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-config\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.510038 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.511177 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.512018 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc 
kubenswrapper[4592]: I0929 17:13:08.512071 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.512767 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.516320 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-config\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.542916 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc66s\" (UniqueName: \"kubernetes.io/projected/6330cf10-7696-474e-b294-e7f668d89c34-kube-api-access-pc66s\") pod \"dnsmasq-dns-5c7b6c5df9-qkvjb\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.597415 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Sep 29 17:13:08 crc kubenswrapper[4592]: I0929 17:13:08.664077 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:09 crc kubenswrapper[4592]: I0929 17:13:09.155533 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-qkvjb"] Sep 29 17:13:09 crc kubenswrapper[4592]: W0929 17:13:09.159833 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6330cf10_7696_474e_b294_e7f668d89c34.slice/crio-6d94d677963ed84066ec9b0c4848c1d0e82d570f4ef54c45222cc1bb65d58f0d WatchSource:0}: Error finding container 6d94d677963ed84066ec9b0c4848c1d0e82d570f4ef54c45222cc1bb65d58f0d: Status 404 returned error can't find the container with id 6d94d677963ed84066ec9b0c4848c1d0e82d570f4ef54c45222cc1bb65d58f0d Sep 29 17:13:10 crc kubenswrapper[4592]: I0929 17:13:10.123432 4592 generic.go:334] "Generic (PLEG): container finished" podID="6330cf10-7696-474e-b294-e7f668d89c34" containerID="00fb7b3ebf80f0250565537fe4e0ec429452cf3525a1039c2cd2cf4488f28e7a" exitCode=0 Sep 29 17:13:10 crc kubenswrapper[4592]: I0929 17:13:10.123480 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" event={"ID":"6330cf10-7696-474e-b294-e7f668d89c34","Type":"ContainerDied","Data":"00fb7b3ebf80f0250565537fe4e0ec429452cf3525a1039c2cd2cf4488f28e7a"} Sep 29 17:13:10 crc kubenswrapper[4592]: I0929 17:13:10.124210 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" event={"ID":"6330cf10-7696-474e-b294-e7f668d89c34","Type":"ContainerStarted","Data":"6d94d677963ed84066ec9b0c4848c1d0e82d570f4ef54c45222cc1bb65d58f0d"} Sep 29 17:13:10 crc kubenswrapper[4592]: I0929 17:13:10.553852 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/ceilometer-0"] Sep 29 17:13:10 crc kubenswrapper[4592]: I0929 17:13:10.554283 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="proxy-httpd" containerID="cri-o://21e50874edd4c09a7f869b409acf1dc0f4db0aa91b07f1de3637b67c02524077" gracePeriod=30 Sep 29 17:13:10 crc kubenswrapper[4592]: I0929 17:13:10.554447 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="sg-core" containerID="cri-o://ee788493b370795efc6a1158b5ec50f2596d7a4f0f829d9b31581415b707d7c5" gracePeriod=30 Sep 29 17:13:10 crc kubenswrapper[4592]: I0929 17:13:10.554519 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="ceilometer-notification-agent" containerID="cri-o://1484b5d3077673fc809902af54809499782a7fa919400cbc44e7b131ac5d2fbe" gracePeriod=30 Sep 29 17:13:10 crc kubenswrapper[4592]: I0929 17:13:10.554254 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="ceilometer-central-agent" containerID="cri-o://7e6701a8aad1b44dc13f8a04622e2ed13cfcd60bfd7b798e732d2b9f860fee40" gracePeriod=30 Sep 29 17:13:11 crc kubenswrapper[4592]: I0929 17:13:11.118988 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:13:11 crc kubenswrapper[4592]: I0929 17:13:11.134619 4592 generic.go:334] "Generic (PLEG): container finished" podID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerID="21e50874edd4c09a7f869b409acf1dc0f4db0aa91b07f1de3637b67c02524077" exitCode=0 Sep 29 17:13:11 crc kubenswrapper[4592]: I0929 17:13:11.134648 4592 generic.go:334] "Generic (PLEG): container finished" podID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerID="ee788493b370795efc6a1158b5ec50f2596d7a4f0f829d9b31581415b707d7c5" exitCode=2 Sep 29 17:13:11 crc kubenswrapper[4592]: I0929 17:13:11.134656 4592 generic.go:334] "Generic (PLEG): container finished" podID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerID="7e6701a8aad1b44dc13f8a04622e2ed13cfcd60bfd7b798e732d2b9f860fee40" exitCode=0 Sep 29 17:13:11 crc kubenswrapper[4592]: I0929 17:13:11.134690 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"927c9cca-f27a-43fc-a67b-0cf398001a9d","Type":"ContainerDied","Data":"21e50874edd4c09a7f869b409acf1dc0f4db0aa91b07f1de3637b67c02524077"} Sep 29 17:13:11 crc kubenswrapper[4592]: I0929 17:13:11.134719 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"927c9cca-f27a-43fc-a67b-0cf398001a9d","Type":"ContainerDied","Data":"ee788493b370795efc6a1158b5ec50f2596d7a4f0f829d9b31581415b707d7c5"} Sep 29 17:13:11 crc kubenswrapper[4592]: I0929 17:13:11.134729 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"927c9cca-f27a-43fc-a67b-0cf398001a9d","Type":"ContainerDied","Data":"7e6701a8aad1b44dc13f8a04622e2ed13cfcd60bfd7b798e732d2b9f860fee40"} Sep 29 17:13:11 crc kubenswrapper[4592]: I0929 17:13:11.137313 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" containerName="nova-api-log" 
containerID="cri-o://3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b" gracePeriod=30 Sep 29 17:13:11 crc kubenswrapper[4592]: I0929 17:13:11.138245 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" event={"ID":"6330cf10-7696-474e-b294-e7f668d89c34","Type":"ContainerStarted","Data":"fda0ce31a58c6b36b96b469cfdaffdba992eb2d134aba159ea0b39c5b2be149b"} Sep 29 17:13:11 crc kubenswrapper[4592]: I0929 17:13:11.138272 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:11 crc kubenswrapper[4592]: I0929 17:13:11.138305 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" containerName="nova-api-api" containerID="cri-o://3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99" gracePeriod=30 Sep 29 17:13:11 crc kubenswrapper[4592]: I0929 17:13:11.177786 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" podStartSLOduration=3.177764649 podStartE2EDuration="3.177764649s" podCreationTimestamp="2025-09-29 17:13:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:13:11.170564095 +0000 UTC m=+1321.318341776" watchObservedRunningTime="2025-09-29 17:13:11.177764649 +0000 UTC m=+1321.325542340" Sep 29 17:13:12 crc kubenswrapper[4592]: I0929 17:13:12.148750 4592 generic.go:334] "Generic (PLEG): container finished" podID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" containerID="3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b" exitCode=143 Sep 29 17:13:12 crc kubenswrapper[4592]: I0929 17:13:12.148969 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c9621cde-29f2-4903-8bf8-9a002c5b6d45","Type":"ContainerDied","Data":"3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b"} Sep 29 17:13:13 crc kubenswrapper[4592]: I0929 17:13:13.465580 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:13 crc kubenswrapper[4592]: I0929 17:13:13.488766 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.182119 4592 generic.go:334] "Generic (PLEG): container finished" podID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerID="c044f7528ceed902d66d79d1f760ee4728348faeae6cb231d66d1c9544900e03" exitCode=137 Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.182508 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c9567f99b-8nh47" event={"ID":"de56880e-c3e2-46db-b63d-c46acd0f6e1f","Type":"ContainerDied","Data":"c044f7528ceed902d66d79d1f760ee4728348faeae6cb231d66d1c9544900e03"} Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.185319 4592 generic.go:334] "Generic (PLEG): container finished" podID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerID="1484b5d3077673fc809902af54809499782a7fa919400cbc44e7b131ac5d2fbe" exitCode=0 Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.186867 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"927c9cca-f27a-43fc-a67b-0cf398001a9d","Type":"ContainerDied","Data":"1484b5d3077673fc809902af54809499782a7fa919400cbc44e7b131ac5d2fbe"} Sep 29 17:13:14 crc 
kubenswrapper[4592]: I0929 17:13:14.214511 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.385402 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.449609 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-m6cdw"] Sep 29 17:13:14 crc kubenswrapper[4592]: E0929 17:13:14.450023 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="ceilometer-central-agent" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.450035 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="ceilometer-central-agent" Sep 29 17:13:14 crc kubenswrapper[4592]: E0929 17:13:14.450051 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="sg-core" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.450056 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="sg-core" Sep 29 17:13:14 crc kubenswrapper[4592]: E0929 17:13:14.450065 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="proxy-httpd" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.450072 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="proxy-httpd" Sep 29 17:13:14 crc kubenswrapper[4592]: E0929 17:13:14.450096 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="ceilometer-notification-agent" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.450102 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="ceilometer-notification-agent" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.450356 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="proxy-httpd" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.450379 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="ceilometer-notification-agent" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.450395 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="sg-core" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.453083 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" containerName="ceilometer-central-agent" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.453898 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.458038 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.458278 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.468374 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-m6cdw"] Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.533829 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555228 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cg2rx\" (UniqueName: \"kubernetes.io/projected/927c9cca-f27a-43fc-a67b-0cf398001a9d-kube-api-access-cg2rx\") pod \"927c9cca-f27a-43fc-a67b-0cf398001a9d\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555281 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-horizon-secret-key\") pod \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555306 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-sg-core-conf-yaml\") pod \"927c9cca-f27a-43fc-a67b-0cf398001a9d\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555345 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/927c9cca-f27a-43fc-a67b-0cf398001a9d-log-httpd\") pod \"927c9cca-f27a-43fc-a67b-0cf398001a9d\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555383 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-scripts\") pod \"927c9cca-f27a-43fc-a67b-0cf398001a9d\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555418 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-combined-ca-bundle\") pod \"927c9cca-f27a-43fc-a67b-0cf398001a9d\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555465 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/927c9cca-f27a-43fc-a67b-0cf398001a9d-run-httpd\") pod \"927c9cca-f27a-43fc-a67b-0cf398001a9d\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555540 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbc6t\" (UniqueName: \"kubernetes.io/projected/de56880e-c3e2-46db-b63d-c46acd0f6e1f-kube-api-access-tbc6t\") pod \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\" 
(UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555572 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-config-data\") pod \"927c9cca-f27a-43fc-a67b-0cf398001a9d\" (UID: \"927c9cca-f27a-43fc-a67b-0cf398001a9d\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555606 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-combined-ca-bundle\") pod \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555629 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de56880e-c3e2-46db-b63d-c46acd0f6e1f-config-data\") pod \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555673 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-horizon-tls-certs\") pod \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555721 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de56880e-c3e2-46db-b63d-c46acd0f6e1f-scripts\") pod \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.555747 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de56880e-c3e2-46db-b63d-c46acd0f6e1f-logs\") pod \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\" (UID: \"de56880e-c3e2-46db-b63d-c46acd0f6e1f\") " Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.556064 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-m6cdw\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.556179 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgbsz\" (UniqueName: \"kubernetes.io/projected/2da5133b-922b-406d-8895-ef8b6c3907f9-kube-api-access-lgbsz\") pod \"nova-cell1-cell-mapping-m6cdw\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.556374 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-scripts\") pod \"nova-cell1-cell-mapping-m6cdw\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.556449 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-config-data\") pod \"nova-cell1-cell-mapping-m6cdw\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.556898 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/927c9cca-f27a-43fc-a67b-0cf398001a9d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "927c9cca-f27a-43fc-a67b-0cf398001a9d" (UID: "927c9cca-f27a-43fc-a67b-0cf398001a9d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.567115 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/927c9cca-f27a-43fc-a67b-0cf398001a9d-kube-api-access-cg2rx" (OuterVolumeSpecName: "kube-api-access-cg2rx") pod "927c9cca-f27a-43fc-a67b-0cf398001a9d" (UID: "927c9cca-f27a-43fc-a67b-0cf398001a9d"). InnerVolumeSpecName "kube-api-access-cg2rx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.582764 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/927c9cca-f27a-43fc-a67b-0cf398001a9d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "927c9cca-f27a-43fc-a67b-0cf398001a9d" (UID: "927c9cca-f27a-43fc-a67b-0cf398001a9d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.589419 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de56880e-c3e2-46db-b63d-c46acd0f6e1f-logs" (OuterVolumeSpecName: "logs") pod "de56880e-c3e2-46db-b63d-c46acd0f6e1f" (UID: "de56880e-c3e2-46db-b63d-c46acd0f6e1f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.612721 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "de56880e-c3e2-46db-b63d-c46acd0f6e1f" (UID: "de56880e-c3e2-46db-b63d-c46acd0f6e1f"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.616102 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de56880e-c3e2-46db-b63d-c46acd0f6e1f-kube-api-access-tbc6t" (OuterVolumeSpecName: "kube-api-access-tbc6t") pod "de56880e-c3e2-46db-b63d-c46acd0f6e1f" (UID: "de56880e-c3e2-46db-b63d-c46acd0f6e1f"). InnerVolumeSpecName "kube-api-access-tbc6t". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.625262 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-scripts" (OuterVolumeSpecName: "scripts") pod "927c9cca-f27a-43fc-a67b-0cf398001a9d" (UID: "927c9cca-f27a-43fc-a67b-0cf398001a9d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.637104 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de56880e-c3e2-46db-b63d-c46acd0f6e1f-scripts" (OuterVolumeSpecName: "scripts") pod "de56880e-c3e2-46db-b63d-c46acd0f6e1f" (UID: "de56880e-c3e2-46db-b63d-c46acd0f6e1f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.661323 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-config-data\") pod \"nova-cell1-cell-mapping-m6cdw\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.661422 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-m6cdw\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.662085 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgbsz\" (UniqueName: \"kubernetes.io/projected/2da5133b-922b-406d-8895-ef8b6c3907f9-kube-api-access-lgbsz\") pod \"nova-cell1-cell-mapping-m6cdw\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.663268 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-scripts\") pod \"nova-cell1-cell-mapping-m6cdw\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.674803 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cg2rx\" (UniqueName: \"kubernetes.io/projected/927c9cca-f27a-43fc-a67b-0cf398001a9d-kube-api-access-cg2rx\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.675177 4592 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.675193 4592 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/927c9cca-f27a-43fc-a67b-0cf398001a9d-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.675236 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.675253 4592 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/927c9cca-f27a-43fc-a67b-0cf398001a9d-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.675268 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbc6t\" (UniqueName: 
\"kubernetes.io/projected/de56880e-c3e2-46db-b63d-c46acd0f6e1f-kube-api-access-tbc6t\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.675281 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de56880e-c3e2-46db-b63d-c46acd0f6e1f-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.675320 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de56880e-c3e2-46db-b63d-c46acd0f6e1f-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.677035 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-config-data\") pod \"nova-cell1-cell-mapping-m6cdw\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.677474 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-scripts\") pod \"nova-cell1-cell-mapping-m6cdw\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.681710 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-m6cdw\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.690903 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgbsz\" (UniqueName: \"kubernetes.io/projected/2da5133b-922b-406d-8895-ef8b6c3907f9-kube-api-access-lgbsz\") pod \"nova-cell1-cell-mapping-m6cdw\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.697328 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de56880e-c3e2-46db-b63d-c46acd0f6e1f-config-data" (OuterVolumeSpecName: "config-data") pod "de56880e-c3e2-46db-b63d-c46acd0f6e1f" (UID: "de56880e-c3e2-46db-b63d-c46acd0f6e1f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.702374 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "927c9cca-f27a-43fc-a67b-0cf398001a9d" (UID: "927c9cca-f27a-43fc-a67b-0cf398001a9d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.711826 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "de56880e-c3e2-46db-b63d-c46acd0f6e1f" (UID: "de56880e-c3e2-46db-b63d-c46acd0f6e1f"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.732626 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de56880e-c3e2-46db-b63d-c46acd0f6e1f" (UID: "de56880e-c3e2-46db-b63d-c46acd0f6e1f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.785860 4592 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.785901 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de56880e-c3e2-46db-b63d-c46acd0f6e1f-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.785915 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.785927 4592 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/de56880e-c3e2-46db-b63d-c46acd0f6e1f-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.795972 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-config-data" (OuterVolumeSpecName: "config-data") pod "927c9cca-f27a-43fc-a67b-0cf398001a9d" (UID: "927c9cca-f27a-43fc-a67b-0cf398001a9d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.807851 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "927c9cca-f27a-43fc-a67b-0cf398001a9d" (UID: "927c9cca-f27a-43fc-a67b-0cf398001a9d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.826918 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.894956 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:14 crc kubenswrapper[4592]: I0929 17:13:14.894998 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/927c9cca-f27a-43fc-a67b-0cf398001a9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.052968 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.101381 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9621cde-29f2-4903-8bf8-9a002c5b6d45-config-data\") pod \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.101517 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9621cde-29f2-4903-8bf8-9a002c5b6d45-combined-ca-bundle\") pod \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.101679 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmwt4\" (UniqueName: \"kubernetes.io/projected/c9621cde-29f2-4903-8bf8-9a002c5b6d45-kube-api-access-jmwt4\") pod \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.101775 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9621cde-29f2-4903-8bf8-9a002c5b6d45-logs\") pod \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\" (UID: \"c9621cde-29f2-4903-8bf8-9a002c5b6d45\") " Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.102745 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9621cde-29f2-4903-8bf8-9a002c5b6d45-logs" (OuterVolumeSpecName: "logs") pod "c9621cde-29f2-4903-8bf8-9a002c5b6d45" (UID: "c9621cde-29f2-4903-8bf8-9a002c5b6d45"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.107624 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9621cde-29f2-4903-8bf8-9a002c5b6d45-kube-api-access-jmwt4" (OuterVolumeSpecName: "kube-api-access-jmwt4") pod "c9621cde-29f2-4903-8bf8-9a002c5b6d45" (UID: "c9621cde-29f2-4903-8bf8-9a002c5b6d45"). InnerVolumeSpecName "kube-api-access-jmwt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.143504 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9621cde-29f2-4903-8bf8-9a002c5b6d45-config-data" (OuterVolumeSpecName: "config-data") pod "c9621cde-29f2-4903-8bf8-9a002c5b6d45" (UID: "c9621cde-29f2-4903-8bf8-9a002c5b6d45"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.198059 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9621cde-29f2-4903-8bf8-9a002c5b6d45-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9621cde-29f2-4903-8bf8-9a002c5b6d45" (UID: "c9621cde-29f2-4903-8bf8-9a002c5b6d45"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.216846 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmwt4\" (UniqueName: \"kubernetes.io/projected/c9621cde-29f2-4903-8bf8-9a002c5b6d45-kube-api-access-jmwt4\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.216888 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9621cde-29f2-4903-8bf8-9a002c5b6d45-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.216900 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9621cde-29f2-4903-8bf8-9a002c5b6d45-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.216914 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9621cde-29f2-4903-8bf8-9a002c5b6d45-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.222479 4592 generic.go:334] "Generic (PLEG): container finished" podID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" containerID="3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99" exitCode=0 Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.222567 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c9621cde-29f2-4903-8bf8-9a002c5b6d45","Type":"ContainerDied","Data":"3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99"} Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.222593 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c9621cde-29f2-4903-8bf8-9a002c5b6d45","Type":"ContainerDied","Data":"f747e76761b36e7907ae0512147d61fbdf5db991ac507d01422f634de36ba57c"} Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.222608 4592 scope.go:117] "RemoveContainer" containerID="3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.222725 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.236523 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"927c9cca-f27a-43fc-a67b-0cf398001a9d","Type":"ContainerDied","Data":"c8f68b6408abd45e19115a4597fb4b8420f63b58a57ed35cd2a359fef7e0e740"} Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.236892 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.261828 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-c9567f99b-8nh47" event={"ID":"de56880e-c3e2-46db-b63d-c46acd0f6e1f","Type":"ContainerDied","Data":"e68cf4336428d074cf25556144e91fa928ad9f137e8548b2badb168f6ac6fec8"} Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.261951 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-c9567f99b-8nh47" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.296450 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.302517 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.325561 4592 scope.go:117] "RemoveContainer" containerID="3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.327338 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-c9567f99b-8nh47"] Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.342277 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Sep 29 17:13:15 crc kubenswrapper[4592]: E0929 17:13:15.343139 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" containerName="nova-api-log" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.343177 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" containerName="nova-api-log" Sep 29 17:13:15 crc kubenswrapper[4592]: E0929 17:13:15.343192 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.343197 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" Sep 29 17:13:15 crc kubenswrapper[4592]: E0929 17:13:15.343219 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.343240 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" Sep 29 17:13:15 crc kubenswrapper[4592]: E0929 17:13:15.343248 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" containerName="nova-api-api" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.343253 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" containerName="nova-api-api" Sep 29 17:13:15 crc kubenswrapper[4592]: E0929 17:13:15.343269 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon-log" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.343275 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon-log" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.343516 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" containerName="nova-api-api" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.343532 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.343556 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon-log" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.343566 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" 
containerName="horizon" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.343576 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" containerName="nova-api-log" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.343585 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" Sep 29 17:13:15 crc kubenswrapper[4592]: E0929 17:13:15.343837 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.343864 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" containerName="horizon" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.345001 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.347334 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.347839 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.348201 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.349367 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-c9567f99b-8nh47"] Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.390004 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.404833 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.416296 4592 scope.go:117] "RemoveContainer" containerID="3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.419397 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:13:15 crc kubenswrapper[4592]: E0929 17:13:15.422423 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99\": container with ID starting with 3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99 not found: ID does not exist" containerID="3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.422459 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99"} err="failed to get container status \"3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99\": rpc error: code = NotFound desc = could not find container \"3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99\": container with ID starting with 3c8e5526272b3c4c2d8362429b3b10495162421318cb0c8eaadd7ac0ac3e4c99 not found: ID does not exist" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.422486 4592 scope.go:117] "RemoveContainer" containerID="3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.429657 4592 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:13:15 crc kubenswrapper[4592]: E0929 17:13:15.429704 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b\": container with ID starting with 3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b not found: ID does not exist" containerID="3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.429741 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b"} err="failed to get container status \"3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b\": rpc error: code = NotFound desc = could not find container \"3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b\": container with ID starting with 3fa2c8c65a9607d872632df0cf48bbeed4e75a321f99f24116086a485e72ad0b not found: ID does not exist" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.429766 4592 scope.go:117] "RemoveContainer" containerID="21e50874edd4c09a7f869b409acf1dc0f4db0aa91b07f1de3637b67c02524077" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.432611 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.435455 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.435620 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.438673 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.438764 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-internal-tls-certs\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.438810 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-public-tls-certs\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.438851 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-config-data\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.438896 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/52571774-6bbb-407e-80ee-3b9d103b4292-logs\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.438925 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2tnc\" (UniqueName: \"kubernetes.io/projected/52571774-6bbb-407e-80ee-3b9d103b4292-kube-api-access-j2tnc\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.455494 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-m6cdw"] Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.468794 4592 scope.go:117] "RemoveContainer" containerID="ee788493b370795efc6a1158b5ec50f2596d7a4f0f829d9b31581415b707d7c5" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.469893 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.527240 4592 scope.go:117] "RemoveContainer" containerID="1484b5d3077673fc809902af54809499782a7fa919400cbc44e7b131ac5d2fbe" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.540701 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2tnc\" (UniqueName: \"kubernetes.io/projected/52571774-6bbb-407e-80ee-3b9d103b4292-kube-api-access-j2tnc\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.540778 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.540867 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd172912-b072-4507-8303-f8be7bd78418-log-httpd\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.540905 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-internal-tls-certs\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.540936 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-config-data\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.540972 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd172912-b072-4507-8303-f8be7bd78418-run-httpd\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.541004 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-public-tls-certs\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.541043 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghwtj\" (UniqueName: \"kubernetes.io/projected/dd172912-b072-4507-8303-f8be7bd78418-kube-api-access-ghwtj\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.541070 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-scripts\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.541099 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.541178 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-config-data\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.541214 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.541274 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52571774-6bbb-407e-80ee-3b9d103b4292-logs\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.541741 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52571774-6bbb-407e-80ee-3b9d103b4292-logs\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.546064 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.547610 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-config-data\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.558058 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-internal-tls-certs\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.558782 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-public-tls-certs\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.572030 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2tnc\" (UniqueName: \"kubernetes.io/projected/52571774-6bbb-407e-80ee-3b9d103b4292-kube-api-access-j2tnc\") pod \"nova-api-0\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") " pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.573897 4592 scope.go:117] "RemoveContainer" containerID="7e6701a8aad1b44dc13f8a04622e2ed13cfcd60bfd7b798e732d2b9f860fee40" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.595881 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.596119 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="27cb9cb7-75ba-479b-ad1e-2e47beaefae4" containerName="kube-state-metrics" containerID="cri-o://5326605e8fbcd5a21eb61bfc445ac1cd609bfcad97dc8950b790451debaf91bf" gracePeriod=30 Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.624439 4592 scope.go:117] "RemoveContainer" containerID="8e464d652fc134a4e820c67f002eb02e3dca835f203b4f1858bc3d8b584b6796" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.643668 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-config-data\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.643738 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd172912-b072-4507-8303-f8be7bd78418-run-httpd\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.643788 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghwtj\" (UniqueName: \"kubernetes.io/projected/dd172912-b072-4507-8303-f8be7bd78418-kube-api-access-ghwtj\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.643819 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-scripts\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.643848 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " 
pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.643883 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.644030 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd172912-b072-4507-8303-f8be7bd78418-log-httpd\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.644540 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd172912-b072-4507-8303-f8be7bd78418-log-httpd\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.645885 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd172912-b072-4507-8303-f8be7bd78418-run-httpd\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.650556 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-config-data\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.650647 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.651454 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.662207 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-scripts\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.665902 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghwtj\" (UniqueName: \"kubernetes.io/projected/dd172912-b072-4507-8303-f8be7bd78418-kube-api-access-ghwtj\") pod \"ceilometer-0\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.684205 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.784504 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:13:15 crc kubenswrapper[4592]: I0929 17:13:15.845480 4592 scope.go:117] "RemoveContainer" containerID="c044f7528ceed902d66d79d1f760ee4728348faeae6cb231d66d1c9544900e03" Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.316517 4592 generic.go:334] "Generic (PLEG): container finished" podID="27cb9cb7-75ba-479b-ad1e-2e47beaefae4" containerID="5326605e8fbcd5a21eb61bfc445ac1cd609bfcad97dc8950b790451debaf91bf" exitCode=2 Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.317016 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"27cb9cb7-75ba-479b-ad1e-2e47beaefae4","Type":"ContainerDied","Data":"5326605e8fbcd5a21eb61bfc445ac1cd609bfcad97dc8950b790451debaf91bf"} Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.317074 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"27cb9cb7-75ba-479b-ad1e-2e47beaefae4","Type":"ContainerDied","Data":"20acf68f555b58d3074a3250cafd7ec80f2c7b01e71ae86c7b838d8e154d6e13"} Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.317092 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20acf68f555b58d3074a3250cafd7ec80f2c7b01e71ae86c7b838d8e154d6e13" Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.326065 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-m6cdw" event={"ID":"2da5133b-922b-406d-8895-ef8b6c3907f9","Type":"ContainerStarted","Data":"4f61a4ef10488b20c9096ca3b9ddfa329bd291837f3e6de80a5ac89519252ece"} Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.326105 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-m6cdw" event={"ID":"2da5133b-922b-406d-8895-ef8b6c3907f9","Type":"ContainerStarted","Data":"1589ddba3c7d8b244929b9b833aa8efa45616379b409e5cc0dd6b435537dfb65"} Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.352943 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-m6cdw" podStartSLOduration=2.352925537 podStartE2EDuration="2.352925537s" podCreationTimestamp="2025-09-29 17:13:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:13:16.349097593 +0000 UTC m=+1326.496875274" watchObservedRunningTime="2025-09-29 17:13:16.352925537 +0000 UTC m=+1326.500703208" Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.358927 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.435424 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:13:16 crc kubenswrapper[4592]: W0929 17:13:16.452426 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd172912_b072_4507_8303_f8be7bd78418.slice/crio-2afd397054f209b66559224eb31cbd3306c900fcab010bb92a9e1989ea53426f WatchSource:0}: Error finding container 2afd397054f209b66559224eb31cbd3306c900fcab010bb92a9e1989ea53426f: Status 404 returned error can't find the container with id 2afd397054f209b66559224eb31cbd3306c900fcab010bb92a9e1989ea53426f Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.469706 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rrps\" (UniqueName: \"kubernetes.io/projected/27cb9cb7-75ba-479b-ad1e-2e47beaefae4-kube-api-access-5rrps\") pod \"27cb9cb7-75ba-479b-ad1e-2e47beaefae4\" (UID: \"27cb9cb7-75ba-479b-ad1e-2e47beaefae4\") " Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.480782 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27cb9cb7-75ba-479b-ad1e-2e47beaefae4-kube-api-access-5rrps" (OuterVolumeSpecName: "kube-api-access-5rrps") pod "27cb9cb7-75ba-479b-ad1e-2e47beaefae4" (UID: "27cb9cb7-75ba-479b-ad1e-2e47beaefae4"). InnerVolumeSpecName "kube-api-access-5rrps". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.519876 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:13:16 crc kubenswrapper[4592]: I0929 17:13:16.571981 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rrps\" (UniqueName: \"kubernetes.io/projected/27cb9cb7-75ba-479b-ad1e-2e47beaefae4-kube-api-access-5rrps\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.198788 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="927c9cca-f27a-43fc-a67b-0cf398001a9d" path="/var/lib/kubelet/pods/927c9cca-f27a-43fc-a67b-0cf398001a9d/volumes" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.200393 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9621cde-29f2-4903-8bf8-9a002c5b6d45" path="/var/lib/kubelet/pods/c9621cde-29f2-4903-8bf8-9a002c5b6d45/volumes" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.202069 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de56880e-c3e2-46db-b63d-c46acd0f6e1f" path="/var/lib/kubelet/pods/de56880e-c3e2-46db-b63d-c46acd0f6e1f/volumes" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.342291 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd172912-b072-4507-8303-f8be7bd78418","Type":"ContainerStarted","Data":"8fdff5f483c145ff47c7a90354df909e3f2112cde2cfd6d1ff065fc07f80f997"} Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.342333 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd172912-b072-4507-8303-f8be7bd78418","Type":"ContainerStarted","Data":"2afd397054f209b66559224eb31cbd3306c900fcab010bb92a9e1989ea53426f"} Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.344556 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"52571774-6bbb-407e-80ee-3b9d103b4292","Type":"ContainerStarted","Data":"ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519"} Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.344583 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"52571774-6bbb-407e-80ee-3b9d103b4292","Type":"ContainerStarted","Data":"f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06"} Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.344593 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"52571774-6bbb-407e-80ee-3b9d103b4292","Type":"ContainerStarted","Data":"2ac4068de62964f96b141e27042b8f37308aeba118a5735526a0fe1da7059bb4"} Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.344638 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.368373 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.368357196 podStartE2EDuration="2.368357196s" podCreationTimestamp="2025-09-29 17:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:13:17.366182647 +0000 UTC m=+1327.513960328" watchObservedRunningTime="2025-09-29 17:13:17.368357196 +0000 UTC m=+1327.516134867" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.394201 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.403692 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.414667 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 17:13:17 crc kubenswrapper[4592]: E0929 17:13:17.415093 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27cb9cb7-75ba-479b-ad1e-2e47beaefae4" containerName="kube-state-metrics" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.415111 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="27cb9cb7-75ba-479b-ad1e-2e47beaefae4" containerName="kube-state-metrics" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.415620 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="27cb9cb7-75ba-479b-ad1e-2e47beaefae4" containerName="kube-state-metrics" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.416573 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.422914 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.423179 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.428920 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.489093 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e655a7a-19bc-4d0d-ab87-2c906903d7c8-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"1e655a7a-19bc-4d0d-ab87-2c906903d7c8\") " pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.489265 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e655a7a-19bc-4d0d-ab87-2c906903d7c8-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"1e655a7a-19bc-4d0d-ab87-2c906903d7c8\") " pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.489339 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/1e655a7a-19bc-4d0d-ab87-2c906903d7c8-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"1e655a7a-19bc-4d0d-ab87-2c906903d7c8\") " pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.489495 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srklz\" (UniqueName: \"kubernetes.io/projected/1e655a7a-19bc-4d0d-ab87-2c906903d7c8-kube-api-access-srklz\") pod \"kube-state-metrics-0\" (UID: \"1e655a7a-19bc-4d0d-ab87-2c906903d7c8\") " pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.591955 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srklz\" (UniqueName: \"kubernetes.io/projected/1e655a7a-19bc-4d0d-ab87-2c906903d7c8-kube-api-access-srklz\") pod \"kube-state-metrics-0\" (UID: \"1e655a7a-19bc-4d0d-ab87-2c906903d7c8\") " pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.592092 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e655a7a-19bc-4d0d-ab87-2c906903d7c8-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"1e655a7a-19bc-4d0d-ab87-2c906903d7c8\") " pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.592380 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e655a7a-19bc-4d0d-ab87-2c906903d7c8-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"1e655a7a-19bc-4d0d-ab87-2c906903d7c8\") " pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.592414 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" 
(UniqueName: \"kubernetes.io/secret/1e655a7a-19bc-4d0d-ab87-2c906903d7c8-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"1e655a7a-19bc-4d0d-ab87-2c906903d7c8\") " pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.612756 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/1e655a7a-19bc-4d0d-ab87-2c906903d7c8-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"1e655a7a-19bc-4d0d-ab87-2c906903d7c8\") " pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.612851 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e655a7a-19bc-4d0d-ab87-2c906903d7c8-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"1e655a7a-19bc-4d0d-ab87-2c906903d7c8\") " pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.615337 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srklz\" (UniqueName: \"kubernetes.io/projected/1e655a7a-19bc-4d0d-ab87-2c906903d7c8-kube-api-access-srklz\") pod \"kube-state-metrics-0\" (UID: \"1e655a7a-19bc-4d0d-ab87-2c906903d7c8\") " pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.616705 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e655a7a-19bc-4d0d-ab87-2c906903d7c8-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"1e655a7a-19bc-4d0d-ab87-2c906903d7c8\") " pod="openstack/kube-state-metrics-0" Sep 29 17:13:17 crc kubenswrapper[4592]: I0929 17:13:17.747293 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Sep 29 17:13:18 crc kubenswrapper[4592]: I0929 17:13:18.051715 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:13:18 crc kubenswrapper[4592]: I0929 17:13:18.361112 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd172912-b072-4507-8303-f8be7bd78418","Type":"ContainerStarted","Data":"dea6361a6f5718b7b7a772688015b421f5967f0fc255e1cdc533e6a59f1ffb26"} Sep 29 17:13:18 crc kubenswrapper[4592]: I0929 17:13:18.666406 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:13:18 crc kubenswrapper[4592]: I0929 17:13:18.757391 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-kgpkc"] Sep 29 17:13:18 crc kubenswrapper[4592]: I0929 17:13:18.757770 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" podUID="df77059a-3702-42ad-a217-1d527ae7c8af" containerName="dnsmasq-dns" containerID="cri-o://865ad78aaaaf4f6d704ff809f1247aa134c6100d27d8cf665bb7f00e2be4af28" gracePeriod=10 Sep 29 17:13:18 crc kubenswrapper[4592]: I0929 17:13:18.994085 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.204942 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27cb9cb7-75ba-479b-ad1e-2e47beaefae4" path="/var/lib/kubelet/pods/27cb9cb7-75ba-479b-ad1e-2e47beaefae4/volumes" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.380924 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd172912-b072-4507-8303-f8be7bd78418","Type":"ContainerStarted","Data":"f13191d49b8fd772fc07a91f8bd3fa0b432ae07938497f8b5000e2509c52547d"} Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.391814 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"1e655a7a-19bc-4d0d-ab87-2c906903d7c8","Type":"ContainerStarted","Data":"260c1e231593903e254e0887e1024c00f4d95b1e2cdd8daa3bb72c38fc1ce4f2"} Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.406736 4592 generic.go:334] "Generic (PLEG): container finished" podID="df77059a-3702-42ad-a217-1d527ae7c8af" containerID="865ad78aaaaf4f6d704ff809f1247aa134c6100d27d8cf665bb7f00e2be4af28" exitCode=0 Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.406881 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" event={"ID":"df77059a-3702-42ad-a217-1d527ae7c8af","Type":"ContainerDied","Data":"865ad78aaaaf4f6d704ff809f1247aa134c6100d27d8cf665bb7f00e2be4af28"} Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.406946 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" event={"ID":"df77059a-3702-42ad-a217-1d527ae7c8af","Type":"ContainerDied","Data":"3bf57128fa0144e7f42d7c6b1c920941f8d8684c36edd4b89a12333a3137f9e1"} Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.407011 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bf57128fa0144e7f42d7c6b1c920941f8d8684c36edd4b89a12333a3137f9e1" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.427028 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.451338 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndltn\" (UniqueName: \"kubernetes.io/projected/df77059a-3702-42ad-a217-1d527ae7c8af-kube-api-access-ndltn\") pod \"df77059a-3702-42ad-a217-1d527ae7c8af\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.451485 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-dns-svc\") pod \"df77059a-3702-42ad-a217-1d527ae7c8af\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.451686 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-dns-swift-storage-0\") pod \"df77059a-3702-42ad-a217-1d527ae7c8af\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.451731 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-config\") pod \"df77059a-3702-42ad-a217-1d527ae7c8af\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.451776 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-ovsdbserver-nb\") pod \"df77059a-3702-42ad-a217-1d527ae7c8af\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.451841 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-ovsdbserver-sb\") pod \"df77059a-3702-42ad-a217-1d527ae7c8af\" (UID: \"df77059a-3702-42ad-a217-1d527ae7c8af\") " Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.480795 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df77059a-3702-42ad-a217-1d527ae7c8af-kube-api-access-ndltn" (OuterVolumeSpecName: "kube-api-access-ndltn") pod "df77059a-3702-42ad-a217-1d527ae7c8af" (UID: "df77059a-3702-42ad-a217-1d527ae7c8af"). InnerVolumeSpecName "kube-api-access-ndltn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.554263 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndltn\" (UniqueName: \"kubernetes.io/projected/df77059a-3702-42ad-a217-1d527ae7c8af-kube-api-access-ndltn\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.569235 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-config" (OuterVolumeSpecName: "config") pod "df77059a-3702-42ad-a217-1d527ae7c8af" (UID: "df77059a-3702-42ad-a217-1d527ae7c8af"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.584624 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "df77059a-3702-42ad-a217-1d527ae7c8af" (UID: "df77059a-3702-42ad-a217-1d527ae7c8af"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.584887 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "df77059a-3702-42ad-a217-1d527ae7c8af" (UID: "df77059a-3702-42ad-a217-1d527ae7c8af"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.587606 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "df77059a-3702-42ad-a217-1d527ae7c8af" (UID: "df77059a-3702-42ad-a217-1d527ae7c8af"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.592720 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "df77059a-3702-42ad-a217-1d527ae7c8af" (UID: "df77059a-3702-42ad-a217-1d527ae7c8af"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.657133 4592 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.657192 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.657205 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.657217 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:19 crc kubenswrapper[4592]: I0929 17:13:19.657228 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/df77059a-3702-42ad-a217-1d527ae7c8af-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:20 crc kubenswrapper[4592]: I0929 17:13:20.416201 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-kgpkc" Sep 29 17:13:20 crc kubenswrapper[4592]: I0929 17:13:20.416200 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"1e655a7a-19bc-4d0d-ab87-2c906903d7c8","Type":"ContainerStarted","Data":"c2781ef659c8e334df607ff1c34a93ed5439cc03523f35e1ce444dc62520fa88"} Sep 29 17:13:20 crc kubenswrapper[4592]: I0929 17:13:20.416755 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Sep 29 17:13:20 crc kubenswrapper[4592]: I0929 17:13:20.446162 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.083822906 podStartE2EDuration="3.446120896s" podCreationTimestamp="2025-09-29 17:13:17 +0000 UTC" firstStartedPulling="2025-09-29 17:13:19.044827169 +0000 UTC m=+1329.192604850" lastFinishedPulling="2025-09-29 17:13:19.407125159 +0000 UTC m=+1329.554902840" observedRunningTime="2025-09-29 17:13:20.433967638 +0000 UTC m=+1330.581745319" watchObservedRunningTime="2025-09-29 17:13:20.446120896 +0000 UTC m=+1330.593898577" Sep 29 17:13:20 crc kubenswrapper[4592]: I0929 17:13:20.463593 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-kgpkc"] Sep 29 17:13:20 crc kubenswrapper[4592]: I0929 17:13:20.485904 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-kgpkc"] Sep 29 17:13:21 crc kubenswrapper[4592]: I0929 17:13:21.199359 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df77059a-3702-42ad-a217-1d527ae7c8af" path="/var/lib/kubelet/pods/df77059a-3702-42ad-a217-1d527ae7c8af/volumes" Sep 29 17:13:21 crc kubenswrapper[4592]: I0929 17:13:21.427341 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="ceilometer-central-agent" containerID="cri-o://8fdff5f483c145ff47c7a90354df909e3f2112cde2cfd6d1ff065fc07f80f997" gracePeriod=30 Sep 29 17:13:21 crc kubenswrapper[4592]: I0929 17:13:21.427648 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd172912-b072-4507-8303-f8be7bd78418","Type":"ContainerStarted","Data":"6ff6c51733731411e7ca6017081bb7b8d7b73cbf507d706d6a6dbf9b601ea323"} Sep 29 17:13:21 crc kubenswrapper[4592]: I0929 17:13:21.427730 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 17:13:21 crc kubenswrapper[4592]: I0929 17:13:21.427918 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="proxy-httpd" containerID="cri-o://6ff6c51733731411e7ca6017081bb7b8d7b73cbf507d706d6a6dbf9b601ea323" gracePeriod=30 Sep 29 17:13:21 crc kubenswrapper[4592]: I0929 17:13:21.427998 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="sg-core" containerID="cri-o://f13191d49b8fd772fc07a91f8bd3fa0b432ae07938497f8b5000e2509c52547d" gracePeriod=30 Sep 29 17:13:21 crc kubenswrapper[4592]: I0929 17:13:21.428055 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="ceilometer-notification-agent" 
containerID="cri-o://dea6361a6f5718b7b7a772688015b421f5967f0fc255e1cdc533e6a59f1ffb26" gracePeriod=30 Sep 29 17:13:21 crc kubenswrapper[4592]: I0929 17:13:21.460965 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.874856631 podStartE2EDuration="6.460942339s" podCreationTimestamp="2025-09-29 17:13:15 +0000 UTC" firstStartedPulling="2025-09-29 17:13:16.462841267 +0000 UTC m=+1326.610618948" lastFinishedPulling="2025-09-29 17:13:21.048926975 +0000 UTC m=+1331.196704656" observedRunningTime="2025-09-29 17:13:21.453347224 +0000 UTC m=+1331.601124905" watchObservedRunningTime="2025-09-29 17:13:21.460942339 +0000 UTC m=+1331.608720030" Sep 29 17:13:22 crc kubenswrapper[4592]: I0929 17:13:22.437920 4592 generic.go:334] "Generic (PLEG): container finished" podID="dd172912-b072-4507-8303-f8be7bd78418" containerID="6ff6c51733731411e7ca6017081bb7b8d7b73cbf507d706d6a6dbf9b601ea323" exitCode=0 Sep 29 17:13:22 crc kubenswrapper[4592]: I0929 17:13:22.438236 4592 generic.go:334] "Generic (PLEG): container finished" podID="dd172912-b072-4507-8303-f8be7bd78418" containerID="f13191d49b8fd772fc07a91f8bd3fa0b432ae07938497f8b5000e2509c52547d" exitCode=2 Sep 29 17:13:22 crc kubenswrapper[4592]: I0929 17:13:22.438247 4592 generic.go:334] "Generic (PLEG): container finished" podID="dd172912-b072-4507-8303-f8be7bd78418" containerID="dea6361a6f5718b7b7a772688015b421f5967f0fc255e1cdc533e6a59f1ffb26" exitCode=0 Sep 29 17:13:22 crc kubenswrapper[4592]: I0929 17:13:22.438127 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd172912-b072-4507-8303-f8be7bd78418","Type":"ContainerDied","Data":"6ff6c51733731411e7ca6017081bb7b8d7b73cbf507d706d6a6dbf9b601ea323"} Sep 29 17:13:22 crc kubenswrapper[4592]: I0929 17:13:22.438276 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd172912-b072-4507-8303-f8be7bd78418","Type":"ContainerDied","Data":"f13191d49b8fd772fc07a91f8bd3fa0b432ae07938497f8b5000e2509c52547d"} Sep 29 17:13:22 crc kubenswrapper[4592]: I0929 17:13:22.438289 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd172912-b072-4507-8303-f8be7bd78418","Type":"ContainerDied","Data":"dea6361a6f5718b7b7a772688015b421f5967f0fc255e1cdc533e6a59f1ffb26"} Sep 29 17:13:23 crc kubenswrapper[4592]: E0929 17:13:23.316723 4592 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd172912_b072_4507_8303_f8be7bd78418.slice/crio-8fdff5f483c145ff47c7a90354df909e3f2112cde2cfd6d1ff065fc07f80f997.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd172912_b072_4507_8303_f8be7bd78418.slice/crio-conmon-8fdff5f483c145ff47c7a90354df909e3f2112cde2cfd6d1ff065fc07f80f997.scope\": RecentStats: unable to find data in memory cache]" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.461810 4592 generic.go:334] "Generic (PLEG): container finished" podID="dd172912-b072-4507-8303-f8be7bd78418" containerID="8fdff5f483c145ff47c7a90354df909e3f2112cde2cfd6d1ff065fc07f80f997" exitCode=0 Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.461857 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"dd172912-b072-4507-8303-f8be7bd78418","Type":"ContainerDied","Data":"8fdff5f483c145ff47c7a90354df909e3f2112cde2cfd6d1ff065fc07f80f997"} Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.465420 4592 generic.go:334] "Generic (PLEG): container finished" podID="2da5133b-922b-406d-8895-ef8b6c3907f9" containerID="4f61a4ef10488b20c9096ca3b9ddfa329bd291837f3e6de80a5ac89519252ece" exitCode=0 Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.465464 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-m6cdw" event={"ID":"2da5133b-922b-406d-8895-ef8b6c3907f9","Type":"ContainerDied","Data":"4f61a4ef10488b20c9096ca3b9ddfa329bd291837f3e6de80a5ac89519252ece"} Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.582090 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.733347 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-combined-ca-bundle\") pod \"dd172912-b072-4507-8303-f8be7bd78418\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.733409 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghwtj\" (UniqueName: \"kubernetes.io/projected/dd172912-b072-4507-8303-f8be7bd78418-kube-api-access-ghwtj\") pod \"dd172912-b072-4507-8303-f8be7bd78418\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.733607 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd172912-b072-4507-8303-f8be7bd78418-run-httpd\") pod \"dd172912-b072-4507-8303-f8be7bd78418\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.734046 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd172912-b072-4507-8303-f8be7bd78418-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "dd172912-b072-4507-8303-f8be7bd78418" (UID: "dd172912-b072-4507-8303-f8be7bd78418"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.734106 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-sg-core-conf-yaml\") pod \"dd172912-b072-4507-8303-f8be7bd78418\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.734126 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-config-data\") pod \"dd172912-b072-4507-8303-f8be7bd78418\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.734463 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd172912-b072-4507-8303-f8be7bd78418-log-httpd\") pod \"dd172912-b072-4507-8303-f8be7bd78418\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.734498 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-scripts\") pod \"dd172912-b072-4507-8303-f8be7bd78418\" (UID: \"dd172912-b072-4507-8303-f8be7bd78418\") " Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.734923 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd172912-b072-4507-8303-f8be7bd78418-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "dd172912-b072-4507-8303-f8be7bd78418" (UID: "dd172912-b072-4507-8303-f8be7bd78418"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.735314 4592 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd172912-b072-4507-8303-f8be7bd78418-log-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.735339 4592 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dd172912-b072-4507-8303-f8be7bd78418-run-httpd\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.740298 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-scripts" (OuterVolumeSpecName: "scripts") pod "dd172912-b072-4507-8303-f8be7bd78418" (UID: "dd172912-b072-4507-8303-f8be7bd78418"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.741592 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd172912-b072-4507-8303-f8be7bd78418-kube-api-access-ghwtj" (OuterVolumeSpecName: "kube-api-access-ghwtj") pod "dd172912-b072-4507-8303-f8be7bd78418" (UID: "dd172912-b072-4507-8303-f8be7bd78418"). InnerVolumeSpecName "kube-api-access-ghwtj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.766645 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "dd172912-b072-4507-8303-f8be7bd78418" (UID: "dd172912-b072-4507-8303-f8be7bd78418"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.833331 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd172912-b072-4507-8303-f8be7bd78418" (UID: "dd172912-b072-4507-8303-f8be7bd78418"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.836699 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.836736 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.836749 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghwtj\" (UniqueName: \"kubernetes.io/projected/dd172912-b072-4507-8303-f8be7bd78418-kube-api-access-ghwtj\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.836759 4592 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.862086 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-config-data" (OuterVolumeSpecName: "config-data") pod "dd172912-b072-4507-8303-f8be7bd78418" (UID: "dd172912-b072-4507-8303-f8be7bd78418"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:23 crc kubenswrapper[4592]: I0929 17:13:23.938359 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd172912-b072-4507-8303-f8be7bd78418-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.476034 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.476258 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dd172912-b072-4507-8303-f8be7bd78418","Type":"ContainerDied","Data":"2afd397054f209b66559224eb31cbd3306c900fcab010bb92a9e1989ea53426f"} Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.477360 4592 scope.go:117] "RemoveContainer" containerID="6ff6c51733731411e7ca6017081bb7b8d7b73cbf507d706d6a6dbf9b601ea323" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.509581 4592 scope.go:117] "RemoveContainer" containerID="f13191d49b8fd772fc07a91f8bd3fa0b432ae07938497f8b5000e2509c52547d" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.521436 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.528340 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.538590 4592 scope.go:117] "RemoveContainer" containerID="dea6361a6f5718b7b7a772688015b421f5967f0fc255e1cdc533e6a59f1ffb26" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.554567 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:13:24 crc kubenswrapper[4592]: E0929 17:13:24.554956 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df77059a-3702-42ad-a217-1d527ae7c8af" containerName="dnsmasq-dns" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.554978 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="df77059a-3702-42ad-a217-1d527ae7c8af" containerName="dnsmasq-dns" Sep 29 17:13:24 crc kubenswrapper[4592]: E0929 17:13:24.555002 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="ceilometer-central-agent" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.555009 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="ceilometer-central-agent" Sep 29 17:13:24 crc kubenswrapper[4592]: E0929 17:13:24.555025 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="ceilometer-notification-agent" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.555033 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="ceilometer-notification-agent" Sep 29 17:13:24 crc kubenswrapper[4592]: E0929 17:13:24.555045 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="sg-core" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.555051 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="sg-core" Sep 29 17:13:24 crc kubenswrapper[4592]: E0929 17:13:24.555068 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="proxy-httpd" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.555074 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="proxy-httpd" Sep 29 17:13:24 crc kubenswrapper[4592]: E0929 17:13:24.555086 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df77059a-3702-42ad-a217-1d527ae7c8af" containerName="init" Sep 29 17:13:24 crc 
kubenswrapper[4592]: I0929 17:13:24.555092 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="df77059a-3702-42ad-a217-1d527ae7c8af" containerName="init" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.555273 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="proxy-httpd" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.555289 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="sg-core" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.555303 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="ceilometer-central-agent" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.555318 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd172912-b072-4507-8303-f8be7bd78418" containerName="ceilometer-notification-agent" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.555330 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="df77059a-3702-42ad-a217-1d527ae7c8af" containerName="dnsmasq-dns" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.557077 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.560091 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.560349 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.560718 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.567845 4592 scope.go:117] "RemoveContainer" containerID="8fdff5f483c145ff47c7a90354df909e3f2112cde2cfd6d1ff065fc07f80f997" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.576395 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.756555 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff89d1c7-4a66-4e00-b04d-24e917c56e11-run-httpd\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.756644 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-config-data\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.756718 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsm4n\" (UniqueName: \"kubernetes.io/projected/ff89d1c7-4a66-4e00-b04d-24e917c56e11-kube-api-access-fsm4n\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.756840 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.756945 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.756985 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff89d1c7-4a66-4e00-b04d-24e917c56e11-log-httpd\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.757016 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-scripts\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.757038 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.858528 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.858568 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff89d1c7-4a66-4e00-b04d-24e917c56e11-log-httpd\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.858603 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-scripts\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.858622 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.858657 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff89d1c7-4a66-4e00-b04d-24e917c56e11-run-httpd\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.858675 4592 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-config-data\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.858720 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsm4n\" (UniqueName: \"kubernetes.io/projected/ff89d1c7-4a66-4e00-b04d-24e917c56e11-kube-api-access-fsm4n\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.858812 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.859313 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff89d1c7-4a66-4e00-b04d-24e917c56e11-log-httpd\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.859380 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ff89d1c7-4a66-4e00-b04d-24e917c56e11-run-httpd\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.860760 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.865198 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.865206 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-config-data\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.865587 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-scripts\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.875672 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.883066 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff89d1c7-4a66-4e00-b04d-24e917c56e11-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:24 crc kubenswrapper[4592]: I0929 17:13:24.884600 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsm4n\" (UniqueName: \"kubernetes.io/projected/ff89d1c7-4a66-4e00-b04d-24e917c56e11-kube-api-access-fsm4n\") pod \"ceilometer-0\" (UID: \"ff89d1c7-4a66-4e00-b04d-24e917c56e11\") " pod="openstack/ceilometer-0" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.061267 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgbsz\" (UniqueName: \"kubernetes.io/projected/2da5133b-922b-406d-8895-ef8b6c3907f9-kube-api-access-lgbsz\") pod \"2da5133b-922b-406d-8895-ef8b6c3907f9\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.061568 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-config-data\") pod \"2da5133b-922b-406d-8895-ef8b6c3907f9\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.061641 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-scripts\") pod \"2da5133b-922b-406d-8895-ef8b6c3907f9\" (UID: \"2da5133b-922b-406d-8895-ef8b6c3907f9\") " Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.061692 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-combined-ca-bundle\") pod \"2da5133b-922b-406d-8895-ef8b6c3907f9\" (UID: 
\"2da5133b-922b-406d-8895-ef8b6c3907f9\") " Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.065545 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2da5133b-922b-406d-8895-ef8b6c3907f9-kube-api-access-lgbsz" (OuterVolumeSpecName: "kube-api-access-lgbsz") pod "2da5133b-922b-406d-8895-ef8b6c3907f9" (UID: "2da5133b-922b-406d-8895-ef8b6c3907f9"). InnerVolumeSpecName "kube-api-access-lgbsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.066606 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-scripts" (OuterVolumeSpecName: "scripts") pod "2da5133b-922b-406d-8895-ef8b6c3907f9" (UID: "2da5133b-922b-406d-8895-ef8b6c3907f9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.086733 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-config-data" (OuterVolumeSpecName: "config-data") pod "2da5133b-922b-406d-8895-ef8b6c3907f9" (UID: "2da5133b-922b-406d-8895-ef8b6c3907f9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.086751 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2da5133b-922b-406d-8895-ef8b6c3907f9" (UID: "2da5133b-922b-406d-8895-ef8b6c3907f9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.163978 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgbsz\" (UniqueName: \"kubernetes.io/projected/2da5133b-922b-406d-8895-ef8b6c3907f9-kube-api-access-lgbsz\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.164253 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.164323 4592 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-scripts\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.164376 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2da5133b-922b-406d-8895-ef8b6c3907f9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.173108 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.194687 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd172912-b072-4507-8303-f8be7bd78418" path="/var/lib/kubelet/pods/dd172912-b072-4507-8303-f8be7bd78418/volumes" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.489053 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-m6cdw" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.489019 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-m6cdw" event={"ID":"2da5133b-922b-406d-8895-ef8b6c3907f9","Type":"ContainerDied","Data":"1589ddba3c7d8b244929b9b833aa8efa45616379b409e5cc0dd6b435537dfb65"} Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.490268 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1589ddba3c7d8b244929b9b833aa8efa45616379b409e5cc0dd6b435537dfb65" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.674519 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.685154 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.685197 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.705039 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.712843 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.713059 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="f0935c62-3820-41b3-afaf-2e8417804197" containerName="nova-scheduler-scheduler" containerID="cri-o://e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399" gracePeriod=30 Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.730923 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.731229 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerName="nova-metadata-log" containerID="cri-o://bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b" gracePeriod=30 Sep 29 17:13:25 crc kubenswrapper[4592]: I0929 17:13:25.731279 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerName="nova-metadata-metadata" containerID="cri-o://ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d" gracePeriod=30 Sep 29 17:13:26 crc kubenswrapper[4592]: E0929 17:13:26.280227 4592 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 17:13:26 crc kubenswrapper[4592]: E0929 17:13:26.281755 4592 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 17:13:26 crc kubenswrapper[4592]: E0929 17:13:26.282867 4592 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command 
error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 17:13:26 crc kubenswrapper[4592]: E0929 17:13:26.282931 4592 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="f0935c62-3820-41b3-afaf-2e8417804197" containerName="nova-scheduler-scheduler" Sep 29 17:13:26 crc kubenswrapper[4592]: I0929 17:13:26.508548 4592 generic.go:334] "Generic (PLEG): container finished" podID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerID="bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b" exitCode=143 Sep 29 17:13:26 crc kubenswrapper[4592]: I0929 17:13:26.508635 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4","Type":"ContainerDied","Data":"bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b"} Sep 29 17:13:26 crc kubenswrapper[4592]: I0929 17:13:26.510617 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff89d1c7-4a66-4e00-b04d-24e917c56e11","Type":"ContainerStarted","Data":"e0463d2246f3da625e5b3c83ccc96655208c496507be5940aa7c8126dcecc59b"} Sep 29 17:13:26 crc kubenswrapper[4592]: I0929 17:13:26.510663 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff89d1c7-4a66-4e00-b04d-24e917c56e11","Type":"ContainerStarted","Data":"451ec872861ecf4ee0c527430d8551c0a36b7b8138c167c0d305f6a73052826e"} Sep 29 17:13:26 crc kubenswrapper[4592]: I0929 17:13:26.510750 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="52571774-6bbb-407e-80ee-3b9d103b4292" containerName="nova-api-log" containerID="cri-o://f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06" gracePeriod=30 Sep 29 17:13:26 crc kubenswrapper[4592]: I0929 17:13:26.510808 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="52571774-6bbb-407e-80ee-3b9d103b4292" containerName="nova-api-api" containerID="cri-o://ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519" gracePeriod=30 Sep 29 17:13:26 crc kubenswrapper[4592]: I0929 17:13:26.518995 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="52571774-6bbb-407e-80ee-3b9d103b4292" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.201:8774/\": EOF" Sep 29 17:13:26 crc kubenswrapper[4592]: I0929 17:13:26.520349 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="52571774-6bbb-407e-80ee-3b9d103b4292" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.201:8774/\": EOF" Sep 29 17:13:27 crc kubenswrapper[4592]: I0929 17:13:27.519116 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff89d1c7-4a66-4e00-b04d-24e917c56e11","Type":"ContainerStarted","Data":"51d8c0823f968cf1e953157bb069e57cedc562339839331be4df62e34bae86fe"} Sep 29 17:13:27 crc kubenswrapper[4592]: I0929 17:13:27.520836 4592 generic.go:334] "Generic (PLEG): container finished" podID="52571774-6bbb-407e-80ee-3b9d103b4292" 
containerID="f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06" exitCode=143 Sep 29 17:13:27 crc kubenswrapper[4592]: I0929 17:13:27.520863 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"52571774-6bbb-407e-80ee-3b9d103b4292","Type":"ContainerDied","Data":"f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06"} Sep 29 17:13:27 crc kubenswrapper[4592]: I0929 17:13:27.772635 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Sep 29 17:13:28 crc kubenswrapper[4592]: I0929 17:13:28.532537 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff89d1c7-4a66-4e00-b04d-24e917c56e11","Type":"ContainerStarted","Data":"1019dac8060ee4ca14aa20206de69bc67f8f8e08b9f438b22912b6d8687c3448"} Sep 29 17:13:28 crc kubenswrapper[4592]: I0929 17:13:28.880582 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": read tcp 10.217.0.2:55870->10.217.0.195:8775: read: connection reset by peer" Sep 29 17:13:28 crc kubenswrapper[4592]: I0929 17:13:28.880810 4592 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": read tcp 10.217.0.2:55882->10.217.0.195:8775: read: connection reset by peer" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.298331 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.443772 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-logs\") pod \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.443847 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-config-data\") pod \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.443959 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-combined-ca-bundle\") pod \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.443980 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbdsj\" (UniqueName: \"kubernetes.io/projected/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-kube-api-access-gbdsj\") pod \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\" (UID: \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.444087 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-nova-metadata-tls-certs\") pod \"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\" (UID: 
\"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4\") " Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.445645 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-logs" (OuterVolumeSpecName: "logs") pod "7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" (UID: "7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.484695 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-kube-api-access-gbdsj" (OuterVolumeSpecName: "kube-api-access-gbdsj") pod "7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" (UID: "7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4"). InnerVolumeSpecName "kube-api-access-gbdsj". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.492936 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-config-data" (OuterVolumeSpecName: "config-data") pod "7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" (UID: "7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.512963 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" (UID: "7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.545678 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-logs\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.545702 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.545714 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.545726 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbdsj\" (UniqueName: \"kubernetes.io/projected/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-kube-api-access-gbdsj\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.548174 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ff89d1c7-4a66-4e00-b04d-24e917c56e11","Type":"ContainerStarted","Data":"b5e93627d8162245fc31c23e664d00d52239d5ea0255ae3c290d63d087980e0e"} Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.549033 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.550899 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-nova-metadata-tls-certs" 
(OuterVolumeSpecName: "nova-metadata-tls-certs") pod "7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" (UID: "7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.553421 4592 generic.go:334] "Generic (PLEG): container finished" podID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerID="ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d" exitCode=0 Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.553451 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4","Type":"ContainerDied","Data":"ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d"} Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.553492 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4","Type":"ContainerDied","Data":"ef87b82d2fe1ea1fc61e57f2a9f63f7946661a7b68761745082022d352c1790b"} Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.553510 4592 scope.go:117] "RemoveContainer" containerID="ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.553654 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.587314 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.252597704 podStartE2EDuration="5.587290116s" podCreationTimestamp="2025-09-29 17:13:24 +0000 UTC" firstStartedPulling="2025-09-29 17:13:25.661556762 +0000 UTC m=+1335.809334443" lastFinishedPulling="2025-09-29 17:13:28.996249174 +0000 UTC m=+1339.144026855" observedRunningTime="2025-09-29 17:13:29.582493076 +0000 UTC m=+1339.730270767" watchObservedRunningTime="2025-09-29 17:13:29.587290116 +0000 UTC m=+1339.735067797" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.590844 4592 scope.go:117] "RemoveContainer" containerID="bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.607170 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.625702 4592 scope.go:117] "RemoveContainer" containerID="ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d" Sep 29 17:13:29 crc kubenswrapper[4592]: E0929 17:13:29.632274 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d\": container with ID starting with ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d not found: ID does not exist" containerID="ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.632336 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d"} err="failed to get container status \"ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d\": rpc error: code = NotFound desc = could not find container \"ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d\": container with ID starting with 
ac5e460e775b4d0553698e7ad2f55e025e45be57051b779886f2fe8b3ec93a6d not found: ID does not exist" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.632408 4592 scope.go:117] "RemoveContainer" containerID="bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b" Sep 29 17:13:29 crc kubenswrapper[4592]: E0929 17:13:29.632774 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b\": container with ID starting with bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b not found: ID does not exist" containerID="bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.632795 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b"} err="failed to get container status \"bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b\": rpc error: code = NotFound desc = could not find container \"bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b\": container with ID starting with bb58c61534c8be99ed826b717a921d7da784a60fd0825539fc781665ecfc782b not found: ID does not exist" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.637319 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.656379 4592 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.689266 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:13:29 crc kubenswrapper[4592]: E0929 17:13:29.691614 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerName="nova-metadata-metadata" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.691644 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerName="nova-metadata-metadata" Sep 29 17:13:29 crc kubenswrapper[4592]: E0929 17:13:29.691681 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2da5133b-922b-406d-8895-ef8b6c3907f9" containerName="nova-manage" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.691689 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="2da5133b-922b-406d-8895-ef8b6c3907f9" containerName="nova-manage" Sep 29 17:13:29 crc kubenswrapper[4592]: E0929 17:13:29.691710 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerName="nova-metadata-log" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.691718 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerName="nova-metadata-log" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.692241 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="2da5133b-922b-406d-8895-ef8b6c3907f9" containerName="nova-manage" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.692272 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerName="nova-metadata-metadata" Sep 29 17:13:29 crc 
kubenswrapper[4592]: I0929 17:13:29.692308 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" containerName="nova-metadata-log" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.694800 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.696521 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.702553 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.703459 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.860285 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/483fd1ac-005e-4d6f-8d1d-03a192a3b366-config-data\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.860322 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/483fd1ac-005e-4d6f-8d1d-03a192a3b366-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.860413 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7mfd\" (UniqueName: \"kubernetes.io/projected/483fd1ac-005e-4d6f-8d1d-03a192a3b366-kube-api-access-q7mfd\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.860432 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/483fd1ac-005e-4d6f-8d1d-03a192a3b366-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.860486 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/483fd1ac-005e-4d6f-8d1d-03a192a3b366-logs\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.962460 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7mfd\" (UniqueName: \"kubernetes.io/projected/483fd1ac-005e-4d6f-8d1d-03a192a3b366-kube-api-access-q7mfd\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.962502 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/483fd1ac-005e-4d6f-8d1d-03a192a3b366-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: 
I0929 17:13:29.962544 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/483fd1ac-005e-4d6f-8d1d-03a192a3b366-logs\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.962639 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/483fd1ac-005e-4d6f-8d1d-03a192a3b366-config-data\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.962655 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/483fd1ac-005e-4d6f-8d1d-03a192a3b366-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.963747 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/483fd1ac-005e-4d6f-8d1d-03a192a3b366-logs\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.965698 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/483fd1ac-005e-4d6f-8d1d-03a192a3b366-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.970779 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/483fd1ac-005e-4d6f-8d1d-03a192a3b366-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.970904 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/483fd1ac-005e-4d6f-8d1d-03a192a3b366-config-data\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:29 crc kubenswrapper[4592]: I0929 17:13:29.979116 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7mfd\" (UniqueName: \"kubernetes.io/projected/483fd1ac-005e-4d6f-8d1d-03a192a3b366-kube-api-access-q7mfd\") pod \"nova-metadata-0\" (UID: \"483fd1ac-005e-4d6f-8d1d-03a192a3b366\") " pod="openstack/nova-metadata-0" Sep 29 17:13:30 crc kubenswrapper[4592]: I0929 17:13:30.014505 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Sep 29 17:13:30 crc kubenswrapper[4592]: I0929 17:13:30.473244 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Sep 29 17:13:30 crc kubenswrapper[4592]: W0929 17:13:30.487769 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod483fd1ac_005e_4d6f_8d1d_03a192a3b366.slice/crio-e05cdc08cc0c30c33d015f9e0776a980c66aa637a3ec1affb6b5023ab9ce4d5b WatchSource:0}: Error finding container e05cdc08cc0c30c33d015f9e0776a980c66aa637a3ec1affb6b5023ab9ce4d5b: Status 404 returned error can't find the container with id e05cdc08cc0c30c33d015f9e0776a980c66aa637a3ec1affb6b5023ab9ce4d5b Sep 29 17:13:30 crc kubenswrapper[4592]: I0929 17:13:30.565660 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"483fd1ac-005e-4d6f-8d1d-03a192a3b366","Type":"ContainerStarted","Data":"e05cdc08cc0c30c33d015f9e0776a980c66aa637a3ec1affb6b5023ab9ce4d5b"} Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.207060 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4" path="/var/lib/kubelet/pods/7e904f32-8f8e-4dcc-8f4e-8cbfe3bd07a4/volumes" Sep 29 17:13:31 crc kubenswrapper[4592]: E0929 17:13:31.278608 4592 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399 is running failed: container process not found" containerID="e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 17:13:31 crc kubenswrapper[4592]: E0929 17:13:31.279129 4592 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399 is running failed: container process not found" containerID="e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 17:13:31 crc kubenswrapper[4592]: E0929 17:13:31.279791 4592 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399 is running failed: container process not found" containerID="e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Sep 29 17:13:31 crc kubenswrapper[4592]: E0929 17:13:31.279888 4592 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="f0935c62-3820-41b3-afaf-2e8417804197" containerName="nova-scheduler-scheduler" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.318787 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.494357 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-combined-ca-bundle\") pod \"f0935c62-3820-41b3-afaf-2e8417804197\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.494432 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-config-data\") pod \"f0935c62-3820-41b3-afaf-2e8417804197\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.495361 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j52z8\" (UniqueName: \"kubernetes.io/projected/f0935c62-3820-41b3-afaf-2e8417804197-kube-api-access-j52z8\") pod \"f0935c62-3820-41b3-afaf-2e8417804197\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.498930 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0935c62-3820-41b3-afaf-2e8417804197-kube-api-access-j52z8" (OuterVolumeSpecName: "kube-api-access-j52z8") pod "f0935c62-3820-41b3-afaf-2e8417804197" (UID: "f0935c62-3820-41b3-afaf-2e8417804197"). InnerVolumeSpecName "kube-api-access-j52z8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.538069 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-config-data" (OuterVolumeSpecName: "config-data") pod "f0935c62-3820-41b3-afaf-2e8417804197" (UID: "f0935c62-3820-41b3-afaf-2e8417804197"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.577293 4592 generic.go:334] "Generic (PLEG): container finished" podID="f0935c62-3820-41b3-afaf-2e8417804197" containerID="e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399" exitCode=0 Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.577365 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f0935c62-3820-41b3-afaf-2e8417804197","Type":"ContainerDied","Data":"e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399"} Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.577395 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"f0935c62-3820-41b3-afaf-2e8417804197","Type":"ContainerDied","Data":"bb5456da38301913aa5d7b74531709d3079f7fb13166c7bb2b2098ab7df1c46d"} Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.577415 4592 scope.go:117] "RemoveContainer" containerID="e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.577562 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.585801 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"483fd1ac-005e-4d6f-8d1d-03a192a3b366","Type":"ContainerStarted","Data":"f2f37a986593b60bbe8a2ff9c8758d0e1fd9caebcdc82bda62a04178ea53bdcb"} Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.585839 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"483fd1ac-005e-4d6f-8d1d-03a192a3b366","Type":"ContainerStarted","Data":"688fb558139e04ccd69ea3902c698854ed1fa1903448cef027f14574a79605d7"} Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.597293 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0935c62-3820-41b3-afaf-2e8417804197" (UID: "f0935c62-3820-41b3-afaf-2e8417804197"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.597502 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-combined-ca-bundle\") pod \"f0935c62-3820-41b3-afaf-2e8417804197\" (UID: \"f0935c62-3820-41b3-afaf-2e8417804197\") " Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.598278 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j52z8\" (UniqueName: \"kubernetes.io/projected/f0935c62-3820-41b3-afaf-2e8417804197-kube-api-access-j52z8\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.598301 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:31 crc kubenswrapper[4592]: W0929 17:13:31.598435 4592 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/f0935c62-3820-41b3-afaf-2e8417804197/volumes/kubernetes.io~secret/combined-ca-bundle Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.598456 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0935c62-3820-41b3-afaf-2e8417804197" (UID: "f0935c62-3820-41b3-afaf-2e8417804197"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.608892 4592 scope.go:117] "RemoveContainer" containerID="e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399" Sep 29 17:13:31 crc kubenswrapper[4592]: E0929 17:13:31.609271 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399\": container with ID starting with e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399 not found: ID does not exist" containerID="e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.609309 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399"} err="failed to get container status \"e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399\": rpc error: code = NotFound desc = could not find container \"e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399\": container with ID starting with e4b3d83a356ab1912f55c08e3f0b8653a127b09681e024462f78f0bb23760399 not found: ID does not exist" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.620826 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.620807357 podStartE2EDuration="2.620807357s" podCreationTimestamp="2025-09-29 17:13:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:13:31.620089968 +0000 UTC m=+1341.767867649" watchObservedRunningTime="2025-09-29 17:13:31.620807357 +0000 UTC m=+1341.768585038" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.699552 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0935c62-3820-41b3-afaf-2e8417804197-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.907411 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.914968 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.933864 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Sep 29 17:13:31 crc kubenswrapper[4592]: E0929 17:13:31.934270 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0935c62-3820-41b3-afaf-2e8417804197" containerName="nova-scheduler-scheduler" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.934288 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0935c62-3820-41b3-afaf-2e8417804197" containerName="nova-scheduler-scheduler" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.934480 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0935c62-3820-41b3-afaf-2e8417804197" containerName="nova-scheduler-scheduler" Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.935072 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.937177 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Sep 29 17:13:31 crc kubenswrapper[4592]: I0929 17:13:31.944618 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.106873 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d200c10f-b6bd-4908-b79e-7ab4ae10587d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d200c10f-b6bd-4908-b79e-7ab4ae10587d\") " pod="openstack/nova-scheduler-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.107019 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d200c10f-b6bd-4908-b79e-7ab4ae10587d-config-data\") pod \"nova-scheduler-0\" (UID: \"d200c10f-b6bd-4908-b79e-7ab4ae10587d\") " pod="openstack/nova-scheduler-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.107051 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggrpl\" (UniqueName: \"kubernetes.io/projected/d200c10f-b6bd-4908-b79e-7ab4ae10587d-kube-api-access-ggrpl\") pod \"nova-scheduler-0\" (UID: \"d200c10f-b6bd-4908-b79e-7ab4ae10587d\") " pod="openstack/nova-scheduler-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.209152 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d200c10f-b6bd-4908-b79e-7ab4ae10587d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d200c10f-b6bd-4908-b79e-7ab4ae10587d\") " pod="openstack/nova-scheduler-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.209293 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d200c10f-b6bd-4908-b79e-7ab4ae10587d-config-data\") pod \"nova-scheduler-0\" (UID: \"d200c10f-b6bd-4908-b79e-7ab4ae10587d\") " pod="openstack/nova-scheduler-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.209328 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggrpl\" (UniqueName: \"kubernetes.io/projected/d200c10f-b6bd-4908-b79e-7ab4ae10587d-kube-api-access-ggrpl\") pod \"nova-scheduler-0\" (UID: \"d200c10f-b6bd-4908-b79e-7ab4ae10587d\") " pod="openstack/nova-scheduler-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.213929 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d200c10f-b6bd-4908-b79e-7ab4ae10587d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d200c10f-b6bd-4908-b79e-7ab4ae10587d\") " pod="openstack/nova-scheduler-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.215918 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d200c10f-b6bd-4908-b79e-7ab4ae10587d-config-data\") pod \"nova-scheduler-0\" (UID: \"d200c10f-b6bd-4908-b79e-7ab4ae10587d\") " pod="openstack/nova-scheduler-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.240074 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggrpl\" (UniqueName: \"kubernetes.io/projected/d200c10f-b6bd-4908-b79e-7ab4ae10587d-kube-api-access-ggrpl\") pod \"nova-scheduler-0\" (UID: \"d200c10f-b6bd-4908-b79e-7ab4ae10587d\") " pod="openstack/nova-scheduler-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.382313 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.394984 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.515212 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-config-data\") pod \"52571774-6bbb-407e-80ee-3b9d103b4292\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") "
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.515452 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52571774-6bbb-407e-80ee-3b9d103b4292-logs\") pod \"52571774-6bbb-407e-80ee-3b9d103b4292\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") "
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.515539 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-public-tls-certs\") pod \"52571774-6bbb-407e-80ee-3b9d103b4292\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") "
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.515629 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-internal-tls-certs\") pod \"52571774-6bbb-407e-80ee-3b9d103b4292\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") "
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.515721 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-combined-ca-bundle\") pod \"52571774-6bbb-407e-80ee-3b9d103b4292\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") "
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.515896 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2tnc\" (UniqueName: \"kubernetes.io/projected/52571774-6bbb-407e-80ee-3b9d103b4292-kube-api-access-j2tnc\") pod \"52571774-6bbb-407e-80ee-3b9d103b4292\" (UID: \"52571774-6bbb-407e-80ee-3b9d103b4292\") "
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.516252 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52571774-6bbb-407e-80ee-3b9d103b4292-logs" (OuterVolumeSpecName: "logs") pod "52571774-6bbb-407e-80ee-3b9d103b4292" (UID: "52571774-6bbb-407e-80ee-3b9d103b4292"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.517376 4592 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52571774-6bbb-407e-80ee-3b9d103b4292-logs\") on node \"crc\" DevicePath \"\""
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.521634 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52571774-6bbb-407e-80ee-3b9d103b4292-kube-api-access-j2tnc" (OuterVolumeSpecName: "kube-api-access-j2tnc") pod "52571774-6bbb-407e-80ee-3b9d103b4292" (UID: "52571774-6bbb-407e-80ee-3b9d103b4292"). InnerVolumeSpecName "kube-api-access-j2tnc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.555159 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-config-data" (OuterVolumeSpecName: "config-data") pod "52571774-6bbb-407e-80ee-3b9d103b4292" (UID: "52571774-6bbb-407e-80ee-3b9d103b4292"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.575355 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "52571774-6bbb-407e-80ee-3b9d103b4292" (UID: "52571774-6bbb-407e-80ee-3b9d103b4292"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.580868 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "52571774-6bbb-407e-80ee-3b9d103b4292" (UID: "52571774-6bbb-407e-80ee-3b9d103b4292"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.584502 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "52571774-6bbb-407e-80ee-3b9d103b4292" (UID: "52571774-6bbb-407e-80ee-3b9d103b4292"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.615561 4592 generic.go:334] "Generic (PLEG): container finished" podID="52571774-6bbb-407e-80ee-3b9d103b4292" containerID="ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519" exitCode=0
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.617315 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.617717 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"52571774-6bbb-407e-80ee-3b9d103b4292","Type":"ContainerDied","Data":"ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519"}
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.617824 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"52571774-6bbb-407e-80ee-3b9d103b4292","Type":"ContainerDied","Data":"2ac4068de62964f96b141e27042b8f37308aeba118a5735526a0fe1da7059bb4"}
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.617843 4592 scope.go:117] "RemoveContainer" containerID="ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.624926 4592 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-public-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.624963 4592 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.624973 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.625005 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2tnc\" (UniqueName: \"kubernetes.io/projected/52571774-6bbb-407e-80ee-3b9d103b4292-kube-api-access-j2tnc\") on node \"crc\" DevicePath \"\""
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.625016 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52571774-6bbb-407e-80ee-3b9d103b4292-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.648933 4592 scope.go:117] "RemoveContainer" containerID="f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.666732 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.691467 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.700747 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Sep 29 17:13:32 crc kubenswrapper[4592]: E0929 17:13:32.701203 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52571774-6bbb-407e-80ee-3b9d103b4292" containerName="nova-api-log"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.701219 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="52571774-6bbb-407e-80ee-3b9d103b4292" containerName="nova-api-log"
Sep 29 17:13:32 crc kubenswrapper[4592]: E0929 17:13:32.701228 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52571774-6bbb-407e-80ee-3b9d103b4292" containerName="nova-api-api"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.701236 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="52571774-6bbb-407e-80ee-3b9d103b4292" containerName="nova-api-api"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.701322 4592 scope.go:117] "RemoveContainer" containerID="ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.701409 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="52571774-6bbb-407e-80ee-3b9d103b4292" containerName="nova-api-api"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.701422 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="52571774-6bbb-407e-80ee-3b9d103b4292" containerName="nova-api-log"
Sep 29 17:13:32 crc kubenswrapper[4592]: E0929 17:13:32.701755 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519\": container with ID starting with ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519 not found: ID does not exist" containerID="ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.701785 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519"} err="failed to get container status \"ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519\": rpc error: code = NotFound desc = could not find container \"ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519\": container with ID starting with ae74197872800c681a10706e65fcfe3c387c9add9e6054fa2affd09e8c074519 not found: ID does not exist"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.701806 4592 scope.go:117] "RemoveContainer" containerID="f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06"
Sep 29 17:13:32 crc kubenswrapper[4592]: E0929 17:13:32.702258 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06\": container with ID starting with f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06 not found: ID does not exist" containerID="f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.702282 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06"} err="failed to get container status \"f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06\": rpc error: code = NotFound desc = could not find container \"f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06\": container with ID starting with f381c9f610a7651309e025c0ab0d386faacf294f398af575f44c8a0465914a06 not found: ID does not exist"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.702379 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.704188 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.704676 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.705398 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.730025 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.828786 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96bba62b-2b30-4b47-af6b-5bf6e32275a1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.828835 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96bba62b-2b30-4b47-af6b-5bf6e32275a1-logs\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.828870 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96bba62b-2b30-4b47-af6b-5bf6e32275a1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.828918 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d6dm\" (UniqueName: \"kubernetes.io/projected/96bba62b-2b30-4b47-af6b-5bf6e32275a1-kube-api-access-8d6dm\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.828944 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96bba62b-2b30-4b47-af6b-5bf6e32275a1-public-tls-certs\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.828965 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96bba62b-2b30-4b47-af6b-5bf6e32275a1-config-data\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.867348 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Sep 29 17:13:32 crc kubenswrapper[4592]: W0929 17:13:32.869742 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd200c10f_b6bd_4908_b79e_7ab4ae10587d.slice/crio-3e22201bde3422013c03ebafb4b07d77f1d27fe90828153156654952bb33823a WatchSource:0}: Error finding container 3e22201bde3422013c03ebafb4b07d77f1d27fe90828153156654952bb33823a: Status 404 returned error can't find the container with id 3e22201bde3422013c03ebafb4b07d77f1d27fe90828153156654952bb33823a
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.930738 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96bba62b-2b30-4b47-af6b-5bf6e32275a1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.930992 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96bba62b-2b30-4b47-af6b-5bf6e32275a1-logs\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.931206 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96bba62b-2b30-4b47-af6b-5bf6e32275a1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.931766 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d6dm\" (UniqueName: \"kubernetes.io/projected/96bba62b-2b30-4b47-af6b-5bf6e32275a1-kube-api-access-8d6dm\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.932197 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96bba62b-2b30-4b47-af6b-5bf6e32275a1-public-tls-certs\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.932529 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96bba62b-2b30-4b47-af6b-5bf6e32275a1-config-data\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.931677 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96bba62b-2b30-4b47-af6b-5bf6e32275a1-logs\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.935550 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96bba62b-2b30-4b47-af6b-5bf6e32275a1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.937356 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96bba62b-2b30-4b47-af6b-5bf6e32275a1-config-data\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.942105 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96bba62b-2b30-4b47-af6b-5bf6e32275a1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.945513 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96bba62b-2b30-4b47-af6b-5bf6e32275a1-public-tls-certs\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:32 crc kubenswrapper[4592]: I0929 17:13:32.957068 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d6dm\" (UniqueName: \"kubernetes.io/projected/96bba62b-2b30-4b47-af6b-5bf6e32275a1-kube-api-access-8d6dm\") pod \"nova-api-0\" (UID: \"96bba62b-2b30-4b47-af6b-5bf6e32275a1\") " pod="openstack/nova-api-0"
Sep 29 17:13:33 crc kubenswrapper[4592]: I0929 17:13:33.022314 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Sep 29 17:13:33 crc kubenswrapper[4592]: I0929 17:13:33.202155 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52571774-6bbb-407e-80ee-3b9d103b4292" path="/var/lib/kubelet/pods/52571774-6bbb-407e-80ee-3b9d103b4292/volumes"
Sep 29 17:13:33 crc kubenswrapper[4592]: I0929 17:13:33.203055 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0935c62-3820-41b3-afaf-2e8417804197" path="/var/lib/kubelet/pods/f0935c62-3820-41b3-afaf-2e8417804197/volumes"
Sep 29 17:13:33 crc kubenswrapper[4592]: I0929 17:13:33.533507 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Sep 29 17:13:33 crc kubenswrapper[4592]: I0929 17:13:33.628300 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d200c10f-b6bd-4908-b79e-7ab4ae10587d","Type":"ContainerStarted","Data":"5ef60e2e7cdc036097ad4e463c50cace4c7eeacee95942165195a095c98b361b"}
Sep 29 17:13:33 crc kubenswrapper[4592]: I0929 17:13:33.630076 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d200c10f-b6bd-4908-b79e-7ab4ae10587d","Type":"ContainerStarted","Data":"3e22201bde3422013c03ebafb4b07d77f1d27fe90828153156654952bb33823a"}
Sep 29 17:13:33 crc kubenswrapper[4592]: I0929 17:13:33.633951 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96bba62b-2b30-4b47-af6b-5bf6e32275a1","Type":"ContainerStarted","Data":"344b71883a7484ddc9e1c898bae63902c40b498ce816e7a225c3c1a6813dab9e"}
Sep 29 17:13:33 crc kubenswrapper[4592]: I0929 17:13:33.646357 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.646336202 podStartE2EDuration="2.646336202s" podCreationTimestamp="2025-09-29 17:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:13:33.643580178 +0000 UTC m=+1343.791357859" watchObservedRunningTime="2025-09-29 17:13:33.646336202 +0000 UTC m=+1343.794113883"
Sep 29 17:13:34 crc kubenswrapper[4592]: I0929 17:13:34.645666 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96bba62b-2b30-4b47-af6b-5bf6e32275a1","Type":"ContainerStarted","Data":"8f832ba5eb36fd1a35ef1ae9bdd21493e47e746d1954c63b6c05e00bf68d799e"}
Sep 29 17:13:34 crc kubenswrapper[4592]: I0929 17:13:34.645968 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96bba62b-2b30-4b47-af6b-5bf6e32275a1","Type":"ContainerStarted","Data":"3195ab56274f2f39a2d9f54fe54b6d6f574bea868c7b3314e67a638662bffa8e"}
Sep 29 17:13:34 crc kubenswrapper[4592]: I0929 17:13:34.672070 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.672047149 podStartE2EDuration="2.672047149s" podCreationTimestamp="2025-09-29 17:13:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:13:34.660298102 +0000 UTC m=+1344.808075793" watchObservedRunningTime="2025-09-29 17:13:34.672047149 +0000 UTC m=+1344.819824830"
Sep 29 17:13:35 crc kubenswrapper[4592]: I0929 17:13:35.014953 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 29 17:13:35 crc kubenswrapper[4592]: I0929 17:13:35.016551 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Sep 29 17:13:37 crc kubenswrapper[4592]: I0929 17:13:37.395913 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Sep 29 17:13:40 crc kubenswrapper[4592]: I0929 17:13:40.015483 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 29 17:13:40 crc kubenswrapper[4592]: I0929 17:13:40.016015 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Sep 29 17:13:41 crc kubenswrapper[4592]: I0929 17:13:41.030393 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="483fd1ac-005e-4d6f-8d1d-03a192a3b366" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 17:13:41 crc kubenswrapper[4592]: I0929 17:13:41.030613 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="483fd1ac-005e-4d6f-8d1d-03a192a3b366" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.205:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 17:13:42 crc kubenswrapper[4592]: I0929 17:13:42.395601 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Sep 29 17:13:42 crc kubenswrapper[4592]: I0929 17:13:42.474911 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Sep 29 17:13:42 crc kubenswrapper[4592]: I0929 17:13:42.747982 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Sep 29 17:13:43 crc kubenswrapper[4592]: I0929 17:13:43.022930 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 29 17:13:43 crc kubenswrapper[4592]: I0929 17:13:43.022984 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Sep 29 17:13:44 crc kubenswrapper[4592]: I0929 17:13:44.038385 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="96bba62b-2b30-4b47-af6b-5bf6e32275a1" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 17:13:44 crc kubenswrapper[4592]: I0929 17:13:44.038989 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="96bba62b-2b30-4b47-af6b-5bf6e32275a1" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Sep 29 17:13:50 crc kubenswrapper[4592]: I0929 17:13:50.020483 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 29 17:13:50 crc kubenswrapper[4592]: I0929 17:13:50.021108 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Sep 29 17:13:50 crc kubenswrapper[4592]: I0929 17:13:50.026663 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 29 17:13:50 crc kubenswrapper[4592]: I0929 17:13:50.028849 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Sep 29 17:13:53 crc kubenswrapper[4592]: I0929 17:13:53.032915 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 29 17:13:53 crc kubenswrapper[4592]: I0929 17:13:53.033379 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Sep 29 17:13:53 crc kubenswrapper[4592]: I0929 17:13:53.034413 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 29 17:13:53 crc kubenswrapper[4592]: I0929 17:13:53.034715 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Sep 29 17:13:53 crc kubenswrapper[4592]: I0929 17:13:53.040102 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 29 17:13:53 crc kubenswrapper[4592]: I0929 17:13:53.045034 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Sep 29 17:13:55 crc kubenswrapper[4592]: I0929 17:13:55.210824 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Sep 29 17:14:04 crc kubenswrapper[4592]: I0929 17:14:04.979107 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 17:14:05 crc kubenswrapper[4592]: I0929 17:14:05.737938 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Sep 29 17:14:09 crc kubenswrapper[4592]: I0929 17:14:09.707910 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="62319168-243a-4613-a565-d864d75110e2" containerName="rabbitmq" containerID="cri-o://a0eaf2ddf3010ccc093f408dc1affbaf4ddec3e16636409a43b6646eb6f6d837" gracePeriod=604796
Sep 29 17:14:10 crc kubenswrapper[4592]: I0929 17:14:10.406511 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="a9840d1a-98b1-4ff7-9140-d21bacc11b0a" containerName="rabbitmq" containerID="cri-o://442e5619db30e129f3dbf7b0ce4ec443150337c3ab97c9ea2d72da996393da54" gracePeriod=604796
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.053688 4592 generic.go:334] "Generic (PLEG): container finished" podID="62319168-243a-4613-a565-d864d75110e2" containerID="a0eaf2ddf3010ccc093f408dc1affbaf4ddec3e16636409a43b6646eb6f6d837" exitCode=0
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.053801 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"62319168-243a-4613-a565-d864d75110e2","Type":"ContainerDied","Data":"a0eaf2ddf3010ccc093f408dc1affbaf4ddec3e16636409a43b6646eb6f6d837"}
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.298067 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.330209 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/62319168-243a-4613-a565-d864d75110e2-pod-info\") pod \"62319168-243a-4613-a565-d864d75110e2\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") "
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.330261 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/62319168-243a-4613-a565-d864d75110e2-rabbitmq-erlang-cookie\") pod \"62319168-243a-4613-a565-d864d75110e2\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") "
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.330295 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-server-conf\") pod \"62319168-243a-4613-a565-d864d75110e2\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") "
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.330319 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/62319168-243a-4613-a565-d864d75110e2-erlang-cookie-secret\") pod \"62319168-243a-4613-a565-d864d75110e2\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") "
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.330349 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-config-data\") pod \"62319168-243a-4613-a565-d864d75110e2\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") "
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.330416 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxv2k\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-kube-api-access-gxv2k\") pod \"62319168-243a-4613-a565-d864d75110e2\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") "
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.330452 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-rabbitmq-tls\") pod \"62319168-243a-4613-a565-d864d75110e2\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") "
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.330486 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"62319168-243a-4613-a565-d864d75110e2\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") "
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.330520 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-rabbitmq-confd\") pod \"62319168-243a-4613-a565-d864d75110e2\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") "
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.330585 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/62319168-243a-4613-a565-d864d75110e2-rabbitmq-plugins\") pod \"62319168-243a-4613-a565-d864d75110e2\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") "
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.330631 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-plugins-conf\") pod \"62319168-243a-4613-a565-d864d75110e2\" (UID: \"62319168-243a-4613-a565-d864d75110e2\") "
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.339106 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62319168-243a-4613-a565-d864d75110e2-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "62319168-243a-4613-a565-d864d75110e2" (UID: "62319168-243a-4613-a565-d864d75110e2"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.349137 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62319168-243a-4613-a565-d864d75110e2-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "62319168-243a-4613-a565-d864d75110e2" (UID: "62319168-243a-4613-a565-d864d75110e2"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.352311 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "62319168-243a-4613-a565-d864d75110e2" (UID: "62319168-243a-4613-a565-d864d75110e2"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.360416 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "62319168-243a-4613-a565-d864d75110e2" (UID: "62319168-243a-4613-a565-d864d75110e2"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.360767 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/62319168-243a-4613-a565-d864d75110e2-pod-info" (OuterVolumeSpecName: "pod-info") pod "62319168-243a-4613-a565-d864d75110e2" (UID: "62319168-243a-4613-a565-d864d75110e2"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.363017 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "62319168-243a-4613-a565-d864d75110e2" (UID: "62319168-243a-4613-a565-d864d75110e2"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.373208 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62319168-243a-4613-a565-d864d75110e2-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "62319168-243a-4613-a565-d864d75110e2" (UID: "62319168-243a-4613-a565-d864d75110e2"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.389302 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-kube-api-access-gxv2k" (OuterVolumeSpecName: "kube-api-access-gxv2k") pod "62319168-243a-4613-a565-d864d75110e2" (UID: "62319168-243a-4613-a565-d864d75110e2"). InnerVolumeSpecName "kube-api-access-gxv2k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.434375 4592 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/62319168-243a-4613-a565-d864d75110e2-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.434414 4592 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-plugins-conf\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.434426 4592 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/62319168-243a-4613-a565-d864d75110e2-pod-info\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.434438 4592 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/62319168-243a-4613-a565-d864d75110e2-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.434454 4592 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/62319168-243a-4613-a565-d864d75110e2-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.434464 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxv2k\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-kube-api-access-gxv2k\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.434473 4592 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.434497 4592 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" "
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.444230 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-config-data" (OuterVolumeSpecName: "config-data") pod "62319168-243a-4613-a565-d864d75110e2" (UID: "62319168-243a-4613-a565-d864d75110e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.511290 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-server-conf" (OuterVolumeSpecName: "server-conf") pod "62319168-243a-4613-a565-d864d75110e2" (UID: "62319168-243a-4613-a565-d864d75110e2"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.511561 4592 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc"
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.537477 4592 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-server-conf\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.537512 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/62319168-243a-4613-a565-d864d75110e2-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.537523 4592 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.589253 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "62319168-243a-4613-a565-d864d75110e2" (UID: "62319168-243a-4613-a565-d864d75110e2"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:14:16 crc kubenswrapper[4592]: I0929 17:14:16.638067 4592 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/62319168-243a-4613-a565-d864d75110e2-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.077085 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"62319168-243a-4613-a565-d864d75110e2","Type":"ContainerDied","Data":"121eb8a448941d99d5c57493edc537928a98b24afc70c69eaa73901686adbdc7"}
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.077417 4592 scope.go:117] "RemoveContainer" containerID="a0eaf2ddf3010ccc093f408dc1affbaf4ddec3e16636409a43b6646eb6f6d837"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.079457 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.090975 4592 generic.go:334] "Generic (PLEG): container finished" podID="a9840d1a-98b1-4ff7-9140-d21bacc11b0a" containerID="442e5619db30e129f3dbf7b0ce4ec443150337c3ab97c9ea2d72da996393da54" exitCode=0
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.091040 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a9840d1a-98b1-4ff7-9140-d21bacc11b0a","Type":"ContainerDied","Data":"442e5619db30e129f3dbf7b0ce4ec443150337c3ab97c9ea2d72da996393da54"}
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.173385 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.189846 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.199641 4592 scope.go:117] "RemoveContainer" containerID="6b3c15d30109ab050101c4ecbd3b5c0d3bea838101281d73bcfb26096ff58bad"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.260636 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.260675 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 17:14:17 crc kubenswrapper[4592]: E0929 17:14:17.261004 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9840d1a-98b1-4ff7-9140-d21bacc11b0a" containerName="setup-container"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.261019 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9840d1a-98b1-4ff7-9140-d21bacc11b0a" containerName="setup-container"
Sep 29 17:14:17 crc kubenswrapper[4592]: E0929 17:14:17.261039 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9840d1a-98b1-4ff7-9140-d21bacc11b0a" containerName="rabbitmq"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.261047 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9840d1a-98b1-4ff7-9140-d21bacc11b0a" containerName="rabbitmq"
Sep 29 17:14:17 crc kubenswrapper[4592]: E0929 17:14:17.261078 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62319168-243a-4613-a565-d864d75110e2" containerName="rabbitmq"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.261087 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="62319168-243a-4613-a565-d864d75110e2" containerName="rabbitmq"
Sep 29 17:14:17 crc kubenswrapper[4592]: E0929 17:14:17.261111 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62319168-243a-4613-a565-d864d75110e2" containerName="setup-container"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.261119 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="62319168-243a-4613-a565-d864d75110e2" containerName="setup-container"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.261349 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="62319168-243a-4613-a565-d864d75110e2" containerName="rabbitmq"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.261366 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9840d1a-98b1-4ff7-9140-d21bacc11b0a" containerName="rabbitmq"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.262407 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.277856 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.278020 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.278370 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.278541 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.278693 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.285598 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.285898 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-6kbmf"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.293842 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.349968 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-erlang-cookie\") pod \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") "
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.350037 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-tls\") pod \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") "
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.350070 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") "
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.350274 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-pod-info\") pod \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") "
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.350304 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-plugins-conf\") pod \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") "
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.350339 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-erlang-cookie-secret\") pod \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") "
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.350389 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-plugins\") pod \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") "
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.350423 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-confd\") pod \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") "
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.350497 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-server-conf\") pod \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") "
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.350557 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcdhq\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-kube-api-access-vcdhq\") pod \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") "
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.350598 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-config-data\") pod \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\" (UID: \"a9840d1a-98b1-4ff7-9140-d21bacc11b0a\") "
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.352998 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "a9840d1a-98b1-4ff7-9140-d21bacc11b0a" (UID: "a9840d1a-98b1-4ff7-9140-d21bacc11b0a"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.358501 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "a9840d1a-98b1-4ff7-9140-d21bacc11b0a" (UID: "a9840d1a-98b1-4ff7-9140-d21bacc11b0a"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.360470 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "a9840d1a-98b1-4ff7-9140-d21bacc11b0a" (UID: "a9840d1a-98b1-4ff7-9140-d21bacc11b0a"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.361374 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-kube-api-access-vcdhq" (OuterVolumeSpecName: "kube-api-access-vcdhq") pod "a9840d1a-98b1-4ff7-9140-d21bacc11b0a" (UID: "a9840d1a-98b1-4ff7-9140-d21bacc11b0a"). InnerVolumeSpecName "kube-api-access-vcdhq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.361522 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "a9840d1a-98b1-4ff7-9140-d21bacc11b0a" (UID: "a9840d1a-98b1-4ff7-9140-d21bacc11b0a"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.367436 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "a9840d1a-98b1-4ff7-9140-d21bacc11b0a" (UID: "a9840d1a-98b1-4ff7-9140-d21bacc11b0a"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.368084 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "a9840d1a-98b1-4ff7-9140-d21bacc11b0a" (UID: "a9840d1a-98b1-4ff7-9140-d21bacc11b0a"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.374677 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-pod-info" (OuterVolumeSpecName: "pod-info") pod "a9840d1a-98b1-4ff7-9140-d21bacc11b0a" (UID: "a9840d1a-98b1-4ff7-9140-d21bacc11b0a"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.445796 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-server-conf" (OuterVolumeSpecName: "server-conf") pod "a9840d1a-98b1-4ff7-9140-d21bacc11b0a" (UID: "a9840d1a-98b1-4ff7-9140-d21bacc11b0a"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453117 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453229 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453254 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453282 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453365 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453387 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453402 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453421 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-config-data\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453446 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fblg6\" (UniqueName: \"kubernetes.io/projected/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-kube-api-access-fblg6\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453485 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453520 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453564 4592 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-pod-info\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453573 4592 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-plugins-conf\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453583 4592 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453592 4592 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453599 4592 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-server-conf\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453607 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcdhq\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-kube-api-access-vcdhq\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453616 4592 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453624 4592 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.453641 4592 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" "
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.474726 4592 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.483426 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-config-data" (OuterVolumeSpecName: "config-data") pod "a9840d1a-98b1-4ff7-9140-d21bacc11b0a" (UID: "a9840d1a-98b1-4ff7-9140-d21bacc11b0a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558119 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558221 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558252 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558310 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558345 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558373 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558401 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558422 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558439 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558460 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-config-data\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558480 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fblg6\" (UniqueName: \"kubernetes.io/projected/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-kube-api-access-fblg6\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558538 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-config-data\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558548 4592 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\""
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.560243 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.560617 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.563610 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.563664 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.563998 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.558367 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/rabbitmq-server-0"
Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.566599 4592
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0" Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.567176 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0" Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.567692 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-config-data\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0" Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.568878 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0" Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.582650 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fblg6\" (UniqueName: \"kubernetes.io/projected/b1c359e8-5df5-4ef2-97ed-a3753c1a681d-kube-api-access-fblg6\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0" Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.586461 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "a9840d1a-98b1-4ff7-9140-d21bacc11b0a" (UID: "a9840d1a-98b1-4ff7-9140-d21bacc11b0a"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.647561 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-server-0\" (UID: \"b1c359e8-5df5-4ef2-97ed-a3753c1a681d\") " pod="openstack/rabbitmq-server-0" Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.659754 4592 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/a9840d1a-98b1-4ff7-9140-d21bacc11b0a-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:17 crc kubenswrapper[4592]: I0929 17:14:17.918213 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.105824 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"a9840d1a-98b1-4ff7-9140-d21bacc11b0a","Type":"ContainerDied","Data":"0c0ddd4903d76dca2c5363c527ffaae2c35560bb14bff084400b869bb36c1b69"} Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.106125 4592 scope.go:117] "RemoveContainer" containerID="442e5619db30e129f3dbf7b0ce4ec443150337c3ab97c9ea2d72da996393da54" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.106323 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.157324 4592 scope.go:117] "RemoveContainer" containerID="e76507d181ba89b027da1aa7409c60822aa6079a9886d55ec0a23fd0d49cae9f" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.172627 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.204368 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.210338 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.212281 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.214796 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.223997 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.224223 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-xgpjr" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.224367 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.224501 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.224667 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.225007 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.247094 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.276344 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.276392 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9ae18931-f35a-4836-a054-06519e81aca0-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.276424 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9ae18931-f35a-4836-a054-06519e81aca0-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.276491 4592 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7c4n\" (UniqueName: \"kubernetes.io/projected/9ae18931-f35a-4836-a054-06519e81aca0-kube-api-access-x7c4n\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.276519 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9ae18931-f35a-4836-a054-06519e81aca0-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.276620 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9ae18931-f35a-4836-a054-06519e81aca0-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.276671 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9ae18931-f35a-4836-a054-06519e81aca0-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.276693 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9ae18931-f35a-4836-a054-06519e81aca0-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.276716 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9ae18931-f35a-4836-a054-06519e81aca0-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.276743 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9ae18931-f35a-4836-a054-06519e81aca0-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.276789 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9ae18931-f35a-4836-a054-06519e81aca0-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.327346 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.379447 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9ae18931-f35a-4836-a054-06519e81aca0-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.379635 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9ae18931-f35a-4836-a054-06519e81aca0-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.380167 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9ae18931-f35a-4836-a054-06519e81aca0-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.380454 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9ae18931-f35a-4836-a054-06519e81aca0-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.380604 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9ae18931-f35a-4836-a054-06519e81aca0-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.380765 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.380835 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9ae18931-f35a-4836-a054-06519e81aca0-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.380919 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9ae18931-f35a-4836-a054-06519e81aca0-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.381066 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7c4n\" (UniqueName: \"kubernetes.io/projected/9ae18931-f35a-4836-a054-06519e81aca0-kube-api-access-x7c4n\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.381134 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9ae18931-f35a-4836-a054-06519e81aca0-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.381477 4592 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9ae18931-f35a-4836-a054-06519e81aca0-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.382283 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9ae18931-f35a-4836-a054-06519e81aca0-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.383340 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.383548 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9ae18931-f35a-4836-a054-06519e81aca0-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.383800 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9ae18931-f35a-4836-a054-06519e81aca0-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.383907 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9ae18931-f35a-4836-a054-06519e81aca0-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.386288 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9ae18931-f35a-4836-a054-06519e81aca0-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.390811 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9ae18931-f35a-4836-a054-06519e81aca0-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.391625 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9ae18931-f35a-4836-a054-06519e81aca0-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.391754 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9ae18931-f35a-4836-a054-06519e81aca0-pod-info\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.401744 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9ae18931-f35a-4836-a054-06519e81aca0-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.410761 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7c4n\" (UniqueName: \"kubernetes.io/projected/9ae18931-f35a-4836-a054-06519e81aca0-kube-api-access-x7c4n\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.433357 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"9ae18931-f35a-4836-a054-06519e81aca0\") " pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:18 crc kubenswrapper[4592]: I0929 17:14:18.585376 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.122460 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b1c359e8-5df5-4ef2-97ed-a3753c1a681d","Type":"ContainerStarted","Data":"c9237b411135b25d9430d34daa51384913c4ed3a034bbe977804804129b2f8be"} Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.122762 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b1c359e8-5df5-4ef2-97ed-a3753c1a681d","Type":"ContainerStarted","Data":"c5cd94e8ac845673560c5594e478cc5143642d8fe5a46938648a3cc696804e71"} Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.147555 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-tkplq"] Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.150995 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.159401 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.174916 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-tkplq"] Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.205317 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-config\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.205800 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzkvh\" (UniqueName: \"kubernetes.io/projected/2b71237a-abf6-4bac-8353-9746ef3a862d-kube-api-access-bzkvh\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.206163 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-dns-svc\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.206201 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.206497 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.206551 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.206617 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.209093 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62319168-243a-4613-a565-d864d75110e2" path="/var/lib/kubelet/pods/62319168-243a-4613-a565-d864d75110e2/volumes" Sep 29 17:14:19 crc kubenswrapper[4592]: 
I0929 17:14:19.214297 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9840d1a-98b1-4ff7-9140-d21bacc11b0a" path="/var/lib/kubelet/pods/a9840d1a-98b1-4ff7-9140-d21bacc11b0a/volumes" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.233960 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Sep 29 17:14:19 crc kubenswrapper[4592]: W0929 17:14:19.236380 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ae18931_f35a_4836_a054_06519e81aca0.slice/crio-9c2359eef03288dd04ea19d84b02b0b84344df6ea1d30dea541f5e1219209e7f WatchSource:0}: Error finding container 9c2359eef03288dd04ea19d84b02b0b84344df6ea1d30dea541f5e1219209e7f: Status 404 returned error can't find the container with id 9c2359eef03288dd04ea19d84b02b0b84344df6ea1d30dea541f5e1219209e7f Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.307890 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.308288 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-dns-svc\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.308349 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.308372 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.308401 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.308441 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-config\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.308600 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzkvh\" (UniqueName: \"kubernetes.io/projected/2b71237a-abf6-4bac-8353-9746ef3a862d-kube-api-access-bzkvh\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: 
\"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.310703 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.310815 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.311200 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.314337 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-config\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.314651 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-dns-svc\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.316746 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.327282 4592 scope.go:117] "RemoveContainer" containerID="39ac62e091dc9ad109fe54b362e5e7ad6bb8f0bce750236177764bead5b52157" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.335752 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzkvh\" (UniqueName: \"kubernetes.io/projected/2b71237a-abf6-4bac-8353-9746ef3a862d-kube-api-access-bzkvh\") pod \"dnsmasq-dns-5576978c7c-tkplq\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.443477 4592 scope.go:117] "RemoveContainer" containerID="5326605e8fbcd5a21eb61bfc445ac1cd609bfcad97dc8950b790451debaf91bf" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.476603 4592 scope.go:117] "RemoveContainer" containerID="4b8e4115fda1e0fd28559ce11dea38480f346f5edee0b3195a3f56733a31b652" Sep 29 17:14:19 crc kubenswrapper[4592]: I0929 17:14:19.500273 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:20 crc kubenswrapper[4592]: I0929 17:14:20.044326 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-tkplq"] Sep 29 17:14:20 crc kubenswrapper[4592]: W0929 17:14:20.056319 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b71237a_abf6_4bac_8353_9746ef3a862d.slice/crio-d1aa1447f3431002a129fd3ed22aa7ecea3fb59914368da9b93b42f2a3cab439 WatchSource:0}: Error finding container d1aa1447f3431002a129fd3ed22aa7ecea3fb59914368da9b93b42f2a3cab439: Status 404 returned error can't find the container with id d1aa1447f3431002a129fd3ed22aa7ecea3fb59914368da9b93b42f2a3cab439 Sep 29 17:14:20 crc kubenswrapper[4592]: I0929 17:14:20.137163 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" event={"ID":"2b71237a-abf6-4bac-8353-9746ef3a862d","Type":"ContainerStarted","Data":"d1aa1447f3431002a129fd3ed22aa7ecea3fb59914368da9b93b42f2a3cab439"} Sep 29 17:14:20 crc kubenswrapper[4592]: I0929 17:14:20.144883 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9ae18931-f35a-4836-a054-06519e81aca0","Type":"ContainerStarted","Data":"125b7216ab8aaa0c3e6aa65e30c1d7fa7e51ad17d423ed0002fc21242d6b9f8f"} Sep 29 17:14:20 crc kubenswrapper[4592]: I0929 17:14:20.144935 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9ae18931-f35a-4836-a054-06519e81aca0","Type":"ContainerStarted","Data":"9c2359eef03288dd04ea19d84b02b0b84344df6ea1d30dea541f5e1219209e7f"} Sep 29 17:14:21 crc kubenswrapper[4592]: I0929 17:14:21.155162 4592 generic.go:334] "Generic (PLEG): container finished" podID="2b71237a-abf6-4bac-8353-9746ef3a862d" containerID="c56d58683f24b3872b6e064f96af29d9aa5f354050b35ab5ad4e82a06d9e6eff" exitCode=0 Sep 29 17:14:21 crc kubenswrapper[4592]: I0929 17:14:21.155500 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" event={"ID":"2b71237a-abf6-4bac-8353-9746ef3a862d","Type":"ContainerDied","Data":"c56d58683f24b3872b6e064f96af29d9aa5f354050b35ab5ad4e82a06d9e6eff"} Sep 29 17:14:22 crc kubenswrapper[4592]: I0929 17:14:22.170389 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" event={"ID":"2b71237a-abf6-4bac-8353-9746ef3a862d","Type":"ContainerStarted","Data":"558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30"} Sep 29 17:14:22 crc kubenswrapper[4592]: I0929 17:14:22.170771 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:22 crc kubenswrapper[4592]: I0929 17:14:22.196376 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" podStartSLOduration=3.196353568 podStartE2EDuration="3.196353568s" podCreationTimestamp="2025-09-29 17:14:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:14:22.192439315 +0000 UTC m=+1392.340217006" watchObservedRunningTime="2025-09-29 17:14:22.196353568 +0000 UTC m=+1392.344131249" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.502494 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:29 crc 
kubenswrapper[4592]: I0929 17:14:29.615594 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-qkvjb"] Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.616008 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" podUID="6330cf10-7696-474e-b294-e7f668d89c34" containerName="dnsmasq-dns" containerID="cri-o://fda0ce31a58c6b36b96b469cfdaffdba992eb2d134aba159ea0b39c5b2be149b" gracePeriod=10 Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.797347 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-667c9c995c-vzqb2"] Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.801019 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.811997 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-config\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.812346 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-dns-svc\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.812467 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-dns-swift-storage-0\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.812605 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-ovsdbserver-nb\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.812726 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-openstack-edpm-ipam\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.812839 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbhwc\" (UniqueName: \"kubernetes.io/projected/c242d2c5-5bda-4cd3-9324-7fd5d7403646-kube-api-access-nbhwc\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.812952 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-ovsdbserver-sb\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.821591 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667c9c995c-vzqb2"] Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.915294 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-openstack-edpm-ipam\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.915355 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbhwc\" (UniqueName: \"kubernetes.io/projected/c242d2c5-5bda-4cd3-9324-7fd5d7403646-kube-api-access-nbhwc\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.915394 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-ovsdbserver-sb\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.915432 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-config\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.915476 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-dns-svc\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.915504 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-dns-swift-storage-0\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.915547 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-ovsdbserver-nb\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.916322 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-openstack-edpm-ipam\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 
17:14:29.916726 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-ovsdbserver-nb\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.917276 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-config\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.917656 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-dns-swift-storage-0\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.917791 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-ovsdbserver-sb\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.918565 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c242d2c5-5bda-4cd3-9324-7fd5d7403646-dns-svc\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:29 crc kubenswrapper[4592]: I0929 17:14:29.957495 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbhwc\" (UniqueName: \"kubernetes.io/projected/c242d2c5-5bda-4cd3-9324-7fd5d7403646-kube-api-access-nbhwc\") pod \"dnsmasq-dns-667c9c995c-vzqb2\" (UID: \"c242d2c5-5bda-4cd3-9324-7fd5d7403646\") " pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.076534 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vjnjw"] Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.080157 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.103467 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vjnjw"] Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.147598 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.232356 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d01faf22-db57-4edb-98f3-c7e84cc626ba-catalog-content\") pod \"community-operators-vjnjw\" (UID: \"d01faf22-db57-4edb-98f3-c7e84cc626ba\") " pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.232414 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d01faf22-db57-4edb-98f3-c7e84cc626ba-utilities\") pod \"community-operators-vjnjw\" (UID: \"d01faf22-db57-4edb-98f3-c7e84cc626ba\") " pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.232463 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q8th\" (UniqueName: \"kubernetes.io/projected/d01faf22-db57-4edb-98f3-c7e84cc626ba-kube-api-access-6q8th\") pod \"community-operators-vjnjw\" (UID: \"d01faf22-db57-4edb-98f3-c7e84cc626ba\") " pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.309959 4592 generic.go:334] "Generic (PLEG): container finished" podID="6330cf10-7696-474e-b294-e7f668d89c34" containerID="fda0ce31a58c6b36b96b469cfdaffdba992eb2d134aba159ea0b39c5b2be149b" exitCode=0 Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.309999 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" event={"ID":"6330cf10-7696-474e-b294-e7f668d89c34","Type":"ContainerDied","Data":"fda0ce31a58c6b36b96b469cfdaffdba992eb2d134aba159ea0b39c5b2be149b"} Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.334192 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d01faf22-db57-4edb-98f3-c7e84cc626ba-catalog-content\") pod \"community-operators-vjnjw\" (UID: \"d01faf22-db57-4edb-98f3-c7e84cc626ba\") " pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.334671 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d01faf22-db57-4edb-98f3-c7e84cc626ba-utilities\") pod \"community-operators-vjnjw\" (UID: \"d01faf22-db57-4edb-98f3-c7e84cc626ba\") " pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.334622 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d01faf22-db57-4edb-98f3-c7e84cc626ba-catalog-content\") pod \"community-operators-vjnjw\" (UID: \"d01faf22-db57-4edb-98f3-c7e84cc626ba\") " pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.334756 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q8th\" (UniqueName: \"kubernetes.io/projected/d01faf22-db57-4edb-98f3-c7e84cc626ba-kube-api-access-6q8th\") pod \"community-operators-vjnjw\" (UID: \"d01faf22-db57-4edb-98f3-c7e84cc626ba\") " pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.334981 4592 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d01faf22-db57-4edb-98f3-c7e84cc626ba-utilities\") pod \"community-operators-vjnjw\" (UID: \"d01faf22-db57-4edb-98f3-c7e84cc626ba\") " pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.354010 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q8th\" (UniqueName: \"kubernetes.io/projected/d01faf22-db57-4edb-98f3-c7e84cc626ba-kube-api-access-6q8th\") pod \"community-operators-vjnjw\" (UID: \"d01faf22-db57-4edb-98f3-c7e84cc626ba\") " pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.403638 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.407276 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.538655 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-dns-swift-storage-0\") pod \"6330cf10-7696-474e-b294-e7f668d89c34\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.538936 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pc66s\" (UniqueName: \"kubernetes.io/projected/6330cf10-7696-474e-b294-e7f668d89c34-kube-api-access-pc66s\") pod \"6330cf10-7696-474e-b294-e7f668d89c34\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.538962 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-config\") pod \"6330cf10-7696-474e-b294-e7f668d89c34\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.538994 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-ovsdbserver-sb\") pod \"6330cf10-7696-474e-b294-e7f668d89c34\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.539027 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-dns-svc\") pod \"6330cf10-7696-474e-b294-e7f668d89c34\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.539053 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-ovsdbserver-nb\") pod \"6330cf10-7696-474e-b294-e7f668d89c34\" (UID: \"6330cf10-7696-474e-b294-e7f668d89c34\") " Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.556706 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6330cf10-7696-474e-b294-e7f668d89c34-kube-api-access-pc66s" (OuterVolumeSpecName: "kube-api-access-pc66s") pod "6330cf10-7696-474e-b294-e7f668d89c34" (UID: 
"6330cf10-7696-474e-b294-e7f668d89c34"). InnerVolumeSpecName "kube-api-access-pc66s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.627342 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6330cf10-7696-474e-b294-e7f668d89c34" (UID: "6330cf10-7696-474e-b294-e7f668d89c34"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.635035 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6330cf10-7696-474e-b294-e7f668d89c34" (UID: "6330cf10-7696-474e-b294-e7f668d89c34"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.637124 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-config" (OuterVolumeSpecName: "config") pod "6330cf10-7696-474e-b294-e7f668d89c34" (UID: "6330cf10-7696-474e-b294-e7f668d89c34"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.643715 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pc66s\" (UniqueName: \"kubernetes.io/projected/6330cf10-7696-474e-b294-e7f668d89c34-kube-api-access-pc66s\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.643753 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.643763 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.643771 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.654636 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6330cf10-7696-474e-b294-e7f668d89c34" (UID: "6330cf10-7696-474e-b294-e7f668d89c34"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.668403 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6330cf10-7696-474e-b294-e7f668d89c34" (UID: "6330cf10-7696-474e-b294-e7f668d89c34"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.745789 4592 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.745826 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6330cf10-7696-474e-b294-e7f668d89c34-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.812653 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667c9c995c-vzqb2"] Sep 29 17:14:30 crc kubenswrapper[4592]: I0929 17:14:30.950326 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vjnjw"] Sep 29 17:14:30 crc kubenswrapper[4592]: W0929 17:14:30.952341 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd01faf22_db57_4edb_98f3_c7e84cc626ba.slice/crio-dd2a52145ee83a6b54f2b50d7b1305bdce755d081f02902bc9df7ade77c74209 WatchSource:0}: Error finding container dd2a52145ee83a6b54f2b50d7b1305bdce755d081f02902bc9df7ade77c74209: Status 404 returned error can't find the container with id dd2a52145ee83a6b54f2b50d7b1305bdce755d081f02902bc9df7ade77c74209 Sep 29 17:14:31 crc kubenswrapper[4592]: I0929 17:14:31.320946 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" event={"ID":"6330cf10-7696-474e-b294-e7f668d89c34","Type":"ContainerDied","Data":"6d94d677963ed84066ec9b0c4848c1d0e82d570f4ef54c45222cc1bb65d58f0d"} Sep 29 17:14:31 crc kubenswrapper[4592]: I0929 17:14:31.321004 4592 scope.go:117] "RemoveContainer" containerID="fda0ce31a58c6b36b96b469cfdaffdba992eb2d134aba159ea0b39c5b2be149b" Sep 29 17:14:31 crc kubenswrapper[4592]: I0929 17:14:31.320960 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-qkvjb" Sep 29 17:14:31 crc kubenswrapper[4592]: I0929 17:14:31.323335 4592 generic.go:334] "Generic (PLEG): container finished" podID="d01faf22-db57-4edb-98f3-c7e84cc626ba" containerID="a1423f434797c0524c890a2cba0b53da32cca6c9cf7fd13dd5989fcc46297c36" exitCode=0 Sep 29 17:14:31 crc kubenswrapper[4592]: I0929 17:14:31.323371 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vjnjw" event={"ID":"d01faf22-db57-4edb-98f3-c7e84cc626ba","Type":"ContainerDied","Data":"a1423f434797c0524c890a2cba0b53da32cca6c9cf7fd13dd5989fcc46297c36"} Sep 29 17:14:31 crc kubenswrapper[4592]: I0929 17:14:31.323425 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vjnjw" event={"ID":"d01faf22-db57-4edb-98f3-c7e84cc626ba","Type":"ContainerStarted","Data":"dd2a52145ee83a6b54f2b50d7b1305bdce755d081f02902bc9df7ade77c74209"} Sep 29 17:14:31 crc kubenswrapper[4592]: I0929 17:14:31.328215 4592 generic.go:334] "Generic (PLEG): container finished" podID="c242d2c5-5bda-4cd3-9324-7fd5d7403646" containerID="5faf6aa58e69173a9491feab85a383da17080dbdf9d4170ef9679eb31d04bfec" exitCode=0 Sep 29 17:14:31 crc kubenswrapper[4592]: I0929 17:14:31.328258 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" event={"ID":"c242d2c5-5bda-4cd3-9324-7fd5d7403646","Type":"ContainerDied","Data":"5faf6aa58e69173a9491feab85a383da17080dbdf9d4170ef9679eb31d04bfec"} Sep 29 17:14:31 crc kubenswrapper[4592]: I0929 17:14:31.328288 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" event={"ID":"c242d2c5-5bda-4cd3-9324-7fd5d7403646","Type":"ContainerStarted","Data":"95d32860745aaa48fed38caad68a0942af78f26a2a36464e8aa3e84639e2b787"} Sep 29 17:14:31 crc kubenswrapper[4592]: I0929 17:14:31.357276 4592 scope.go:117] "RemoveContainer" containerID="00fb7b3ebf80f0250565537fe4e0ec429452cf3525a1039c2cd2cf4488f28e7a" Sep 29 17:14:31 crc kubenswrapper[4592]: I0929 17:14:31.412363 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-qkvjb"] Sep 29 17:14:31 crc kubenswrapper[4592]: I0929 17:14:31.423974 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-qkvjb"] Sep 29 17:14:32 crc kubenswrapper[4592]: I0929 17:14:32.340520 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vjnjw" event={"ID":"d01faf22-db57-4edb-98f3-c7e84cc626ba","Type":"ContainerStarted","Data":"d8b8e46dcf116bdac230a37919e019165e59e277984c99354d392aa3088786c2"} Sep 29 17:14:32 crc kubenswrapper[4592]: I0929 17:14:32.343020 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" event={"ID":"c242d2c5-5bda-4cd3-9324-7fd5d7403646","Type":"ContainerStarted","Data":"a1f5409198a6008fc4cc84910df231732b87711330d5ab227c2fb9913db790a9"} Sep 29 17:14:32 crc kubenswrapper[4592]: I0929 17:14:32.343271 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:32 crc kubenswrapper[4592]: I0929 17:14:32.397086 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" podStartSLOduration=3.397069229 podStartE2EDuration="3.397069229s" podCreationTimestamp="2025-09-29 17:14:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:14:32.381360618 +0000 UTC m=+1402.529138299" watchObservedRunningTime="2025-09-29 17:14:32.397069229 +0000 UTC m=+1402.544846910" Sep 29 17:14:33 crc kubenswrapper[4592]: I0929 17:14:33.195343 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6330cf10-7696-474e-b294-e7f668d89c34" path="/var/lib/kubelet/pods/6330cf10-7696-474e-b294-e7f668d89c34/volumes" Sep 29 17:14:34 crc kubenswrapper[4592]: I0929 17:14:34.367056 4592 generic.go:334] "Generic (PLEG): container finished" podID="d01faf22-db57-4edb-98f3-c7e84cc626ba" containerID="d8b8e46dcf116bdac230a37919e019165e59e277984c99354d392aa3088786c2" exitCode=0 Sep 29 17:14:34 crc kubenswrapper[4592]: I0929 17:14:34.367115 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vjnjw" event={"ID":"d01faf22-db57-4edb-98f3-c7e84cc626ba","Type":"ContainerDied","Data":"d8b8e46dcf116bdac230a37919e019165e59e277984c99354d392aa3088786c2"} Sep 29 17:14:35 crc kubenswrapper[4592]: I0929 17:14:35.381026 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vjnjw" event={"ID":"d01faf22-db57-4edb-98f3-c7e84cc626ba","Type":"ContainerStarted","Data":"7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29"} Sep 29 17:14:35 crc kubenswrapper[4592]: I0929 17:14:35.409904 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vjnjw" podStartSLOduration=1.816556557 podStartE2EDuration="5.409883663s" podCreationTimestamp="2025-09-29 17:14:30 +0000 UTC" firstStartedPulling="2025-09-29 17:14:31.324856289 +0000 UTC m=+1401.472633970" lastFinishedPulling="2025-09-29 17:14:34.918183395 +0000 UTC m=+1405.065961076" observedRunningTime="2025-09-29 17:14:35.402275596 +0000 UTC m=+1405.550053277" watchObservedRunningTime="2025-09-29 17:14:35.409883663 +0000 UTC m=+1405.557661344" Sep 29 17:14:40 crc kubenswrapper[4592]: I0929 17:14:40.149957 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-667c9c995c-vzqb2" Sep 29 17:14:40 crc kubenswrapper[4592]: I0929 17:14:40.230753 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-tkplq"] Sep 29 17:14:40 crc kubenswrapper[4592]: I0929 17:14:40.233636 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" podUID="2b71237a-abf6-4bac-8353-9746ef3a862d" containerName="dnsmasq-dns" containerID="cri-o://558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30" gracePeriod=10 Sep 29 17:14:40 crc kubenswrapper[4592]: I0929 17:14:40.407912 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:40 crc kubenswrapper[4592]: I0929 17:14:40.409288 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.346685 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.438035 4592 generic.go:334] "Generic (PLEG): container finished" podID="2b71237a-abf6-4bac-8353-9746ef3a862d" containerID="558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30" exitCode=0 Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.438455 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" event={"ID":"2b71237a-abf6-4bac-8353-9746ef3a862d","Type":"ContainerDied","Data":"558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30"} Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.438487 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" event={"ID":"2b71237a-abf6-4bac-8353-9746ef3a862d","Type":"ContainerDied","Data":"d1aa1447f3431002a129fd3ed22aa7ecea3fb59914368da9b93b42f2a3cab439"} Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.438505 4592 scope.go:117] "RemoveContainer" containerID="558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.438641 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-tkplq" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.461487 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-dns-swift-storage-0\") pod \"2b71237a-abf6-4bac-8353-9746ef3a862d\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.461533 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-ovsdbserver-sb\") pod \"2b71237a-abf6-4bac-8353-9746ef3a862d\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.461619 4592 scope.go:117] "RemoveContainer" containerID="c56d58683f24b3872b6e064f96af29d9aa5f354050b35ab5ad4e82a06d9e6eff" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.461659 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-dns-svc\") pod \"2b71237a-abf6-4bac-8353-9746ef3a862d\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.461740 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-openstack-edpm-ipam\") pod \"2b71237a-abf6-4bac-8353-9746ef3a862d\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.461790 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzkvh\" (UniqueName: \"kubernetes.io/projected/2b71237a-abf6-4bac-8353-9746ef3a862d-kube-api-access-bzkvh\") pod \"2b71237a-abf6-4bac-8353-9746ef3a862d\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.461878 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-ovsdbserver-nb\") pod 
\"2b71237a-abf6-4bac-8353-9746ef3a862d\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.461935 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-config\") pod \"2b71237a-abf6-4bac-8353-9746ef3a862d\" (UID: \"2b71237a-abf6-4bac-8353-9746ef3a862d\") " Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.471623 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b71237a-abf6-4bac-8353-9746ef3a862d-kube-api-access-bzkvh" (OuterVolumeSpecName: "kube-api-access-bzkvh") pod "2b71237a-abf6-4bac-8353-9746ef3a862d" (UID: "2b71237a-abf6-4bac-8353-9746ef3a862d"). InnerVolumeSpecName "kube-api-access-bzkvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.504218 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-vjnjw" podUID="d01faf22-db57-4edb-98f3-c7e84cc626ba" containerName="registry-server" probeResult="failure" output=< Sep 29 17:14:41 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s Sep 29 17:14:41 crc kubenswrapper[4592]: > Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.565041 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-config" (OuterVolumeSpecName: "config") pod "2b71237a-abf6-4bac-8353-9746ef3a862d" (UID: "2b71237a-abf6-4bac-8353-9746ef3a862d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.565900 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzkvh\" (UniqueName: \"kubernetes.io/projected/2b71237a-abf6-4bac-8353-9746ef3a862d-kube-api-access-bzkvh\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.565934 4592 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-config\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.566386 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2b71237a-abf6-4bac-8353-9746ef3a862d" (UID: "2b71237a-abf6-4bac-8353-9746ef3a862d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.566758 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2b71237a-abf6-4bac-8353-9746ef3a862d" (UID: "2b71237a-abf6-4bac-8353-9746ef3a862d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.576590 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2b71237a-abf6-4bac-8353-9746ef3a862d" (UID: "2b71237a-abf6-4bac-8353-9746ef3a862d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.576660 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "2b71237a-abf6-4bac-8353-9746ef3a862d" (UID: "2b71237a-abf6-4bac-8353-9746ef3a862d"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.593681 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2b71237a-abf6-4bac-8353-9746ef3a862d" (UID: "2b71237a-abf6-4bac-8353-9746ef3a862d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.668863 4592 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-dns-svc\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.668921 4592 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.668944 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.668958 4592 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.668972 4592 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b71237a-abf6-4bac-8353-9746ef3a862d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.689512 4592 scope.go:117] "RemoveContainer" containerID="558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30" Sep 29 17:14:41 crc kubenswrapper[4592]: E0929 17:14:41.689967 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30\": container with ID starting with 558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30 not found: ID does not exist" containerID="558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.690000 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30"} err="failed to get container status \"558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30\": rpc error: code = NotFound desc = could not find container \"558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30\": container with ID starting with 558f773a787801cf7291a4e76d0e1adb2309ecc82a19d9e3a2ac00cf8f568b30 not found: ID does not exist" Sep 29 17:14:41 
crc kubenswrapper[4592]: I0929 17:14:41.690027 4592 scope.go:117] "RemoveContainer" containerID="c56d58683f24b3872b6e064f96af29d9aa5f354050b35ab5ad4e82a06d9e6eff" Sep 29 17:14:41 crc kubenswrapper[4592]: E0929 17:14:41.690415 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c56d58683f24b3872b6e064f96af29d9aa5f354050b35ab5ad4e82a06d9e6eff\": container with ID starting with c56d58683f24b3872b6e064f96af29d9aa5f354050b35ab5ad4e82a06d9e6eff not found: ID does not exist" containerID="c56d58683f24b3872b6e064f96af29d9aa5f354050b35ab5ad4e82a06d9e6eff" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.690452 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c56d58683f24b3872b6e064f96af29d9aa5f354050b35ab5ad4e82a06d9e6eff"} err="failed to get container status \"c56d58683f24b3872b6e064f96af29d9aa5f354050b35ab5ad4e82a06d9e6eff\": rpc error: code = NotFound desc = could not find container \"c56d58683f24b3872b6e064f96af29d9aa5f354050b35ab5ad4e82a06d9e6eff\": container with ID starting with c56d58683f24b3872b6e064f96af29d9aa5f354050b35ab5ad4e82a06d9e6eff not found: ID does not exist" Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.774850 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-tkplq"] Sep 29 17:14:41 crc kubenswrapper[4592]: I0929 17:14:41.783359 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-tkplq"] Sep 29 17:14:43 crc kubenswrapper[4592]: I0929 17:14:43.195056 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b71237a-abf6-4bac-8353-9746ef3a862d" path="/var/lib/kubelet/pods/2b71237a-abf6-4bac-8353-9746ef3a862d/volumes" Sep 29 17:14:49 crc kubenswrapper[4592]: I0929 17:14:49.532069 4592 generic.go:334] "Generic (PLEG): container finished" podID="b1c359e8-5df5-4ef2-97ed-a3753c1a681d" containerID="c9237b411135b25d9430d34daa51384913c4ed3a034bbe977804804129b2f8be" exitCode=0 Sep 29 17:14:49 crc kubenswrapper[4592]: I0929 17:14:49.532155 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b1c359e8-5df5-4ef2-97ed-a3753c1a681d","Type":"ContainerDied","Data":"c9237b411135b25d9430d34daa51384913c4ed3a034bbe977804804129b2f8be"} Sep 29 17:14:50 crc kubenswrapper[4592]: I0929 17:14:50.494607 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:50 crc kubenswrapper[4592]: I0929 17:14:50.545636 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9ae18931-f35a-4836-a054-06519e81aca0","Type":"ContainerDied","Data":"125b7216ab8aaa0c3e6aa65e30c1d7fa7e51ad17d423ed0002fc21242d6b9f8f"} Sep 29 17:14:50 crc kubenswrapper[4592]: I0929 17:14:50.545563 4592 generic.go:334] "Generic (PLEG): container finished" podID="9ae18931-f35a-4836-a054-06519e81aca0" containerID="125b7216ab8aaa0c3e6aa65e30c1d7fa7e51ad17d423ed0002fc21242d6b9f8f" exitCode=0 Sep 29 17:14:50 crc kubenswrapper[4592]: I0929 17:14:50.558443 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b1c359e8-5df5-4ef2-97ed-a3753c1a681d","Type":"ContainerStarted","Data":"f56760e60dee1b671f6983a1e2953920a6249174cfbc5d9dda5cb15b465dd978"} Sep 29 17:14:50 crc kubenswrapper[4592]: I0929 17:14:50.559882 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/rabbitmq-server-0" Sep 29 17:14:50 crc kubenswrapper[4592]: I0929 17:14:50.624291 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:50 crc kubenswrapper[4592]: I0929 17:14:50.664121 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=33.66410038 podStartE2EDuration="33.66410038s" podCreationTimestamp="2025-09-29 17:14:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:14:50.643415645 +0000 UTC m=+1420.791193336" watchObservedRunningTime="2025-09-29 17:14:50.66410038 +0000 UTC m=+1420.811878061" Sep 29 17:14:50 crc kubenswrapper[4592]: I0929 17:14:50.746090 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vjnjw"] Sep 29 17:14:51 crc kubenswrapper[4592]: I0929 17:14:51.568352 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vjnjw" podUID="d01faf22-db57-4edb-98f3-c7e84cc626ba" containerName="registry-server" containerID="cri-o://7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29" gracePeriod=2 Sep 29 17:14:51 crc kubenswrapper[4592]: I0929 17:14:51.568843 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"9ae18931-f35a-4836-a054-06519e81aca0","Type":"ContainerStarted","Data":"655362413fc581328dcabe951c333896712e5e54d769524e9cbf460c778ecb81"} Sep 29 17:14:51 crc kubenswrapper[4592]: I0929 17:14:51.569685 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.022870 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.050810 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=34.050793428 podStartE2EDuration="34.050793428s" podCreationTimestamp="2025-09-29 17:14:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:14:51.608701254 +0000 UTC m=+1421.756478935" watchObservedRunningTime="2025-09-29 17:14:52.050793428 +0000 UTC m=+1422.198571109" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.093421 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6q8th\" (UniqueName: \"kubernetes.io/projected/d01faf22-db57-4edb-98f3-c7e84cc626ba-kube-api-access-6q8th\") pod \"d01faf22-db57-4edb-98f3-c7e84cc626ba\" (UID: \"d01faf22-db57-4edb-98f3-c7e84cc626ba\") " Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.093588 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d01faf22-db57-4edb-98f3-c7e84cc626ba-utilities\") pod \"d01faf22-db57-4edb-98f3-c7e84cc626ba\" (UID: \"d01faf22-db57-4edb-98f3-c7e84cc626ba\") " Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.093614 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d01faf22-db57-4edb-98f3-c7e84cc626ba-catalog-content\") pod \"d01faf22-db57-4edb-98f3-c7e84cc626ba\" (UID: \"d01faf22-db57-4edb-98f3-c7e84cc626ba\") " Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.094997 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d01faf22-db57-4edb-98f3-c7e84cc626ba-utilities" (OuterVolumeSpecName: "utilities") pod "d01faf22-db57-4edb-98f3-c7e84cc626ba" (UID: "d01faf22-db57-4edb-98f3-c7e84cc626ba"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.111682 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d01faf22-db57-4edb-98f3-c7e84cc626ba-kube-api-access-6q8th" (OuterVolumeSpecName: "kube-api-access-6q8th") pod "d01faf22-db57-4edb-98f3-c7e84cc626ba" (UID: "d01faf22-db57-4edb-98f3-c7e84cc626ba"). InnerVolumeSpecName "kube-api-access-6q8th". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.192380 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d01faf22-db57-4edb-98f3-c7e84cc626ba-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d01faf22-db57-4edb-98f3-c7e84cc626ba" (UID: "d01faf22-db57-4edb-98f3-c7e84cc626ba"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.195757 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d01faf22-db57-4edb-98f3-c7e84cc626ba-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.195790 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d01faf22-db57-4edb-98f3-c7e84cc626ba-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.195802 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6q8th\" (UniqueName: \"kubernetes.io/projected/d01faf22-db57-4edb-98f3-c7e84cc626ba-kube-api-access-6q8th\") on node \"crc\" DevicePath \"\"" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.579269 4592 generic.go:334] "Generic (PLEG): container finished" podID="d01faf22-db57-4edb-98f3-c7e84cc626ba" containerID="7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29" exitCode=0 Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.579340 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vjnjw" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.579415 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vjnjw" event={"ID":"d01faf22-db57-4edb-98f3-c7e84cc626ba","Type":"ContainerDied","Data":"7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29"} Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.579444 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vjnjw" event={"ID":"d01faf22-db57-4edb-98f3-c7e84cc626ba","Type":"ContainerDied","Data":"dd2a52145ee83a6b54f2b50d7b1305bdce755d081f02902bc9df7ade77c74209"} Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.579481 4592 scope.go:117] "RemoveContainer" containerID="7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.605463 4592 scope.go:117] "RemoveContainer" containerID="d8b8e46dcf116bdac230a37919e019165e59e277984c99354d392aa3088786c2" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.628725 4592 scope.go:117] "RemoveContainer" containerID="a1423f434797c0524c890a2cba0b53da32cca6c9cf7fd13dd5989fcc46297c36" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.648126 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vjnjw"] Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.667671 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vjnjw"] Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.718311 4592 scope.go:117] "RemoveContainer" containerID="7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29" Sep 29 17:14:52 crc kubenswrapper[4592]: E0929 17:14:52.718872 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29\": container with ID starting with 7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29 not found: ID does not exist" containerID="7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.718915 
4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29"} err="failed to get container status \"7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29\": rpc error: code = NotFound desc = could not find container \"7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29\": container with ID starting with 7bffc7166142ad47c7a5548da8c9e54a12350a8885c55f2c0395087b76ed5b29 not found: ID does not exist" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.718942 4592 scope.go:117] "RemoveContainer" containerID="d8b8e46dcf116bdac230a37919e019165e59e277984c99354d392aa3088786c2" Sep 29 17:14:52 crc kubenswrapper[4592]: E0929 17:14:52.721201 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8b8e46dcf116bdac230a37919e019165e59e277984c99354d392aa3088786c2\": container with ID starting with d8b8e46dcf116bdac230a37919e019165e59e277984c99354d392aa3088786c2 not found: ID does not exist" containerID="d8b8e46dcf116bdac230a37919e019165e59e277984c99354d392aa3088786c2" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.721242 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8b8e46dcf116bdac230a37919e019165e59e277984c99354d392aa3088786c2"} err="failed to get container status \"d8b8e46dcf116bdac230a37919e019165e59e277984c99354d392aa3088786c2\": rpc error: code = NotFound desc = could not find container \"d8b8e46dcf116bdac230a37919e019165e59e277984c99354d392aa3088786c2\": container with ID starting with d8b8e46dcf116bdac230a37919e019165e59e277984c99354d392aa3088786c2 not found: ID does not exist" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.721272 4592 scope.go:117] "RemoveContainer" containerID="a1423f434797c0524c890a2cba0b53da32cca6c9cf7fd13dd5989fcc46297c36" Sep 29 17:14:52 crc kubenswrapper[4592]: E0929 17:14:52.721889 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1423f434797c0524c890a2cba0b53da32cca6c9cf7fd13dd5989fcc46297c36\": container with ID starting with a1423f434797c0524c890a2cba0b53da32cca6c9cf7fd13dd5989fcc46297c36 not found: ID does not exist" containerID="a1423f434797c0524c890a2cba0b53da32cca6c9cf7fd13dd5989fcc46297c36" Sep 29 17:14:52 crc kubenswrapper[4592]: I0929 17:14:52.721921 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1423f434797c0524c890a2cba0b53da32cca6c9cf7fd13dd5989fcc46297c36"} err="failed to get container status \"a1423f434797c0524c890a2cba0b53da32cca6c9cf7fd13dd5989fcc46297c36\": rpc error: code = NotFound desc = could not find container \"a1423f434797c0524c890a2cba0b53da32cca6c9cf7fd13dd5989fcc46297c36\": container with ID starting with a1423f434797c0524c890a2cba0b53da32cca6c9cf7fd13dd5989fcc46297c36 not found: ID does not exist" Sep 29 17:14:53 crc kubenswrapper[4592]: I0929 17:14:53.193284 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d01faf22-db57-4edb-98f3-c7e84cc626ba" path="/var/lib/kubelet/pods/d01faf22-db57-4edb-98f3-c7e84cc626ba/volumes" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.139251 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq"] Sep 29 17:15:00 crc kubenswrapper[4592]: E0929 17:15:00.141215 4592 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="2b71237a-abf6-4bac-8353-9746ef3a862d" containerName="dnsmasq-dns" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.141298 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b71237a-abf6-4bac-8353-9746ef3a862d" containerName="dnsmasq-dns" Sep 29 17:15:00 crc kubenswrapper[4592]: E0929 17:15:00.141360 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d01faf22-db57-4edb-98f3-c7e84cc626ba" containerName="extract-content" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.141407 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="d01faf22-db57-4edb-98f3-c7e84cc626ba" containerName="extract-content" Sep 29 17:15:00 crc kubenswrapper[4592]: E0929 17:15:00.141472 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6330cf10-7696-474e-b294-e7f668d89c34" containerName="init" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.141524 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="6330cf10-7696-474e-b294-e7f668d89c34" containerName="init" Sep 29 17:15:00 crc kubenswrapper[4592]: E0929 17:15:00.141589 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b71237a-abf6-4bac-8353-9746ef3a862d" containerName="init" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.141641 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b71237a-abf6-4bac-8353-9746ef3a862d" containerName="init" Sep 29 17:15:00 crc kubenswrapper[4592]: E0929 17:15:00.141696 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d01faf22-db57-4edb-98f3-c7e84cc626ba" containerName="registry-server" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.141745 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="d01faf22-db57-4edb-98f3-c7e84cc626ba" containerName="registry-server" Sep 29 17:15:00 crc kubenswrapper[4592]: E0929 17:15:00.141801 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6330cf10-7696-474e-b294-e7f668d89c34" containerName="dnsmasq-dns" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.141848 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="6330cf10-7696-474e-b294-e7f668d89c34" containerName="dnsmasq-dns" Sep 29 17:15:00 crc kubenswrapper[4592]: E0929 17:15:00.141903 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d01faf22-db57-4edb-98f3-c7e84cc626ba" containerName="extract-utilities" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.141955 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="d01faf22-db57-4edb-98f3-c7e84cc626ba" containerName="extract-utilities" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.142189 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="d01faf22-db57-4edb-98f3-c7e84cc626ba" containerName="registry-server" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.142255 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b71237a-abf6-4bac-8353-9746ef3a862d" containerName="dnsmasq-dns" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.142306 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="6330cf10-7696-474e-b294-e7f668d89c34" containerName="dnsmasq-dns" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.143061 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.147841 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.148474 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.156520 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq"] Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.250911 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/81f64e86-1b9e-4c06-8020-5e7d3c488c01-config-volume\") pod \"collect-profiles-29319435-jb2fq\" (UID: \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.251014 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49p9x\" (UniqueName: \"kubernetes.io/projected/81f64e86-1b9e-4c06-8020-5e7d3c488c01-kube-api-access-49p9x\") pod \"collect-profiles-29319435-jb2fq\" (UID: \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.251121 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/81f64e86-1b9e-4c06-8020-5e7d3c488c01-secret-volume\") pod \"collect-profiles-29319435-jb2fq\" (UID: \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.353303 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49p9x\" (UniqueName: \"kubernetes.io/projected/81f64e86-1b9e-4c06-8020-5e7d3c488c01-kube-api-access-49p9x\") pod \"collect-profiles-29319435-jb2fq\" (UID: \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.353406 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/81f64e86-1b9e-4c06-8020-5e7d3c488c01-secret-volume\") pod \"collect-profiles-29319435-jb2fq\" (UID: \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.353492 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/81f64e86-1b9e-4c06-8020-5e7d3c488c01-config-volume\") pod \"collect-profiles-29319435-jb2fq\" (UID: \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.354735 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/81f64e86-1b9e-4c06-8020-5e7d3c488c01-config-volume\") pod 
\"collect-profiles-29319435-jb2fq\" (UID: \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.362848 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/81f64e86-1b9e-4c06-8020-5e7d3c488c01-secret-volume\") pod \"collect-profiles-29319435-jb2fq\" (UID: \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.373357 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49p9x\" (UniqueName: \"kubernetes.io/projected/81f64e86-1b9e-4c06-8020-5e7d3c488c01-kube-api-access-49p9x\") pod \"collect-profiles-29319435-jb2fq\" (UID: \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.466930 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.883048 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.883349 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:15:00 crc kubenswrapper[4592]: I0929 17:15:00.973618 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq"] Sep 29 17:15:01 crc kubenswrapper[4592]: I0929 17:15:01.698245 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" event={"ID":"81f64e86-1b9e-4c06-8020-5e7d3c488c01","Type":"ContainerStarted","Data":"5dc15646fe904fee748442c867975a622f8b3209bb00cec68cba66171511f0e4"} Sep 29 17:15:01 crc kubenswrapper[4592]: I0929 17:15:01.698301 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" event={"ID":"81f64e86-1b9e-4c06-8020-5e7d3c488c01","Type":"ContainerStarted","Data":"5363a044aa43d93524bd9fdf327a52bfa3db84c80e8290bd8f8d0483e95c64ca"} Sep 29 17:15:01 crc kubenswrapper[4592]: I0929 17:15:01.713403 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" podStartSLOduration=1.7133884940000002 podStartE2EDuration="1.713388494s" podCreationTimestamp="2025-09-29 17:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:15:01.712793888 +0000 UTC m=+1431.860571579" watchObservedRunningTime="2025-09-29 17:15:01.713388494 +0000 UTC m=+1431.861166175" Sep 29 17:15:02 crc kubenswrapper[4592]: I0929 17:15:02.713437 4592 generic.go:334] "Generic (PLEG): 
container finished" podID="81f64e86-1b9e-4c06-8020-5e7d3c488c01" containerID="5dc15646fe904fee748442c867975a622f8b3209bb00cec68cba66171511f0e4" exitCode=0 Sep 29 17:15:02 crc kubenswrapper[4592]: I0929 17:15:02.713539 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" event={"ID":"81f64e86-1b9e-4c06-8020-5e7d3c488c01","Type":"ContainerDied","Data":"5dc15646fe904fee748442c867975a622f8b3209bb00cec68cba66171511f0e4"} Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.076458 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.132390 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/81f64e86-1b9e-4c06-8020-5e7d3c488c01-secret-volume\") pod \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\" (UID: \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\") " Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.132430 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/81f64e86-1b9e-4c06-8020-5e7d3c488c01-config-volume\") pod \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\" (UID: \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\") " Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.132618 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49p9x\" (UniqueName: \"kubernetes.io/projected/81f64e86-1b9e-4c06-8020-5e7d3c488c01-kube-api-access-49p9x\") pod \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\" (UID: \"81f64e86-1b9e-4c06-8020-5e7d3c488c01\") " Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.133044 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81f64e86-1b9e-4c06-8020-5e7d3c488c01-config-volume" (OuterVolumeSpecName: "config-volume") pod "81f64e86-1b9e-4c06-8020-5e7d3c488c01" (UID: "81f64e86-1b9e-4c06-8020-5e7d3c488c01"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.138267 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81f64e86-1b9e-4c06-8020-5e7d3c488c01-kube-api-access-49p9x" (OuterVolumeSpecName: "kube-api-access-49p9x") pod "81f64e86-1b9e-4c06-8020-5e7d3c488c01" (UID: "81f64e86-1b9e-4c06-8020-5e7d3c488c01"). InnerVolumeSpecName "kube-api-access-49p9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.139412 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81f64e86-1b9e-4c06-8020-5e7d3c488c01-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "81f64e86-1b9e-4c06-8020-5e7d3c488c01" (UID: "81f64e86-1b9e-4c06-8020-5e7d3c488c01"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.193294 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm"] Sep 29 17:15:04 crc kubenswrapper[4592]: E0929 17:15:04.193689 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f64e86-1b9e-4c06-8020-5e7d3c488c01" containerName="collect-profiles" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.193706 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f64e86-1b9e-4c06-8020-5e7d3c488c01" containerName="collect-profiles" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.193886 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="81f64e86-1b9e-4c06-8020-5e7d3c488c01" containerName="collect-profiles" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.194520 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.197557 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.197711 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.199014 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.200955 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.213843 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm"] Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.234818 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkplc\" (UniqueName: \"kubernetes.io/projected/abd885d0-dbac-4845-8a3e-2454abf4d652-kube-api-access-wkplc\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.234923 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.234960 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.235006 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.235062 4592 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/81f64e86-1b9e-4c06-8020-5e7d3c488c01-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.235075 4592 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/81f64e86-1b9e-4c06-8020-5e7d3c488c01-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.235084 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49p9x\" (UniqueName: \"kubernetes.io/projected/81f64e86-1b9e-4c06-8020-5e7d3c488c01-kube-api-access-49p9x\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.336561 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.336641 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.336715 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.336788 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkplc\" (UniqueName: \"kubernetes.io/projected/abd885d0-dbac-4845-8a3e-2454abf4d652-kube-api-access-wkplc\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.340976 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.342781 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm\" 
(UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.345442 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.362022 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkplc\" (UniqueName: \"kubernetes.io/projected/abd885d0-dbac-4845-8a3e-2454abf4d652-kube-api-access-wkplc\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.579762 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.753612 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" event={"ID":"81f64e86-1b9e-4c06-8020-5e7d3c488c01","Type":"ContainerDied","Data":"5363a044aa43d93524bd9fdf327a52bfa3db84c80e8290bd8f8d0483e95c64ca"} Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.753842 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5363a044aa43d93524bd9fdf327a52bfa3db84c80e8290bd8f8d0483e95c64ca" Sep 29 17:15:04 crc kubenswrapper[4592]: I0929 17:15:04.753897 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq" Sep 29 17:15:05 crc kubenswrapper[4592]: I0929 17:15:05.590077 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm"] Sep 29 17:15:05 crc kubenswrapper[4592]: I0929 17:15:05.765038 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" event={"ID":"abd885d0-dbac-4845-8a3e-2454abf4d652","Type":"ContainerStarted","Data":"3d65334a484ffbe661883f11cb3c58fe9372899be69fc739e5316dc546ccd9d0"} Sep 29 17:15:07 crc kubenswrapper[4592]: I0929 17:15:07.920337 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Sep 29 17:15:08 crc kubenswrapper[4592]: I0929 17:15:08.588339 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Sep 29 17:15:15 crc kubenswrapper[4592]: I0929 17:15:15.334199 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:15:15 crc kubenswrapper[4592]: I0929 17:15:15.855405 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" event={"ID":"abd885d0-dbac-4845-8a3e-2454abf4d652","Type":"ContainerStarted","Data":"1c5ba3e2b6fa1477b8c7b882e9ddb8c714fd9045989b165b0b5aaa29fcd2d6c5"} Sep 29 17:15:15 crc kubenswrapper[4592]: I0929 17:15:15.881487 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" podStartSLOduration=2.143705557 podStartE2EDuration="11.881467093s" podCreationTimestamp="2025-09-29 17:15:04 +0000 UTC" firstStartedPulling="2025-09-29 17:15:05.593713484 +0000 UTC m=+1435.741491165" lastFinishedPulling="2025-09-29 17:15:15.331475 +0000 UTC m=+1445.479252701" observedRunningTime="2025-09-29 17:15:15.870018009 +0000 UTC m=+1446.017795700" watchObservedRunningTime="2025-09-29 17:15:15.881467093 +0000 UTC m=+1446.029244774" Sep 29 17:15:19 crc kubenswrapper[4592]: I0929 17:15:19.800332 4592 scope.go:117] "RemoveContainer" containerID="799499baf640565a941a7359236338472fdac23e1ea3e20950cdba5240f415c2" Sep 29 17:15:21 crc kubenswrapper[4592]: I0929 17:15:21.942967 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4f6fl"] Sep 29 17:15:21 crc kubenswrapper[4592]: I0929 17:15:21.946487 4592 util.go:30] "No sandbox for pod can be found. 
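[Editor's note] The pod_startup_latency_tracker entry above encodes a simple relationship: podStartSLOduration is podStartE2EDuration minus the time spent pulling images (lastFinishedPulling minus firstStartedPulling, on the monotonic m=+ clock). A quick check of the logged values in Go (plain arithmetic on the log's numbers, not kubelet code):

    package main

    import "fmt"

    func main() {
        e2e := 11.881467093                     // podStartE2EDuration, seconds
        pull := 1445.479252701 - 1435.741491165 // lastFinishedPulling - firstStartedPulling, m=+ seconds
        fmt.Printf("%.9f\n", e2e-pull)          // prints ~2.143705557, matching podStartSLOduration
    }

So of the ~11.9 s end-to-end startup, ~9.7 s was image pull; only ~2.1 s counts against the startup SLO.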
Need to start a new one" pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:21 crc kubenswrapper[4592]: I0929 17:15:21.978101 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4f6fl"] Sep 29 17:15:22 crc kubenswrapper[4592]: I0929 17:15:22.069405 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39476971-ab8d-461e-bdac-865ee86327b3-catalog-content\") pod \"redhat-operators-4f6fl\" (UID: \"39476971-ab8d-461e-bdac-865ee86327b3\") " pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:22 crc kubenswrapper[4592]: I0929 17:15:22.069458 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx648\" (UniqueName: \"kubernetes.io/projected/39476971-ab8d-461e-bdac-865ee86327b3-kube-api-access-hx648\") pod \"redhat-operators-4f6fl\" (UID: \"39476971-ab8d-461e-bdac-865ee86327b3\") " pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:22 crc kubenswrapper[4592]: I0929 17:15:22.069501 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39476971-ab8d-461e-bdac-865ee86327b3-utilities\") pod \"redhat-operators-4f6fl\" (UID: \"39476971-ab8d-461e-bdac-865ee86327b3\") " pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:22 crc kubenswrapper[4592]: I0929 17:15:22.171452 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39476971-ab8d-461e-bdac-865ee86327b3-catalog-content\") pod \"redhat-operators-4f6fl\" (UID: \"39476971-ab8d-461e-bdac-865ee86327b3\") " pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:22 crc kubenswrapper[4592]: I0929 17:15:22.171499 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx648\" (UniqueName: \"kubernetes.io/projected/39476971-ab8d-461e-bdac-865ee86327b3-kube-api-access-hx648\") pod \"redhat-operators-4f6fl\" (UID: \"39476971-ab8d-461e-bdac-865ee86327b3\") " pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:22 crc kubenswrapper[4592]: I0929 17:15:22.171530 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39476971-ab8d-461e-bdac-865ee86327b3-utilities\") pod \"redhat-operators-4f6fl\" (UID: \"39476971-ab8d-461e-bdac-865ee86327b3\") " pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:22 crc kubenswrapper[4592]: I0929 17:15:22.172137 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39476971-ab8d-461e-bdac-865ee86327b3-utilities\") pod \"redhat-operators-4f6fl\" (UID: \"39476971-ab8d-461e-bdac-865ee86327b3\") " pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:22 crc kubenswrapper[4592]: I0929 17:15:22.172634 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39476971-ab8d-461e-bdac-865ee86327b3-catalog-content\") pod \"redhat-operators-4f6fl\" (UID: \"39476971-ab8d-461e-bdac-865ee86327b3\") " pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:22 crc kubenswrapper[4592]: I0929 17:15:22.195476 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-hx648\" (UniqueName: \"kubernetes.io/projected/39476971-ab8d-461e-bdac-865ee86327b3-kube-api-access-hx648\") pod \"redhat-operators-4f6fl\" (UID: \"39476971-ab8d-461e-bdac-865ee86327b3\") " pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:22 crc kubenswrapper[4592]: I0929 17:15:22.270884 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:22 crc kubenswrapper[4592]: I0929 17:15:22.719245 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4f6fl"] Sep 29 17:15:22 crc kubenswrapper[4592]: I0929 17:15:22.922406 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f6fl" event={"ID":"39476971-ab8d-461e-bdac-865ee86327b3","Type":"ContainerStarted","Data":"586f3cfdb91d0e1f1aeac5f63335f8a3b97995d1c67f7f786a10a54393d7ee30"} Sep 29 17:15:23 crc kubenswrapper[4592]: I0929 17:15:23.934950 4592 generic.go:334] "Generic (PLEG): container finished" podID="39476971-ab8d-461e-bdac-865ee86327b3" containerID="a773345fe03a3a6d623159060a200d5fcfebaeedad199a58bfa61d70a6cac151" exitCode=0 Sep 29 17:15:23 crc kubenswrapper[4592]: I0929 17:15:23.934994 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f6fl" event={"ID":"39476971-ab8d-461e-bdac-865ee86327b3","Type":"ContainerDied","Data":"a773345fe03a3a6d623159060a200d5fcfebaeedad199a58bfa61d70a6cac151"} Sep 29 17:15:25 crc kubenswrapper[4592]: I0929 17:15:25.954557 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f6fl" event={"ID":"39476971-ab8d-461e-bdac-865ee86327b3","Type":"ContainerStarted","Data":"88990bf59e319d8294153ee325fa7083c040c2f68ef0954a6d98b0bf95f87ed7"} Sep 29 17:15:27 crc kubenswrapper[4592]: I0929 17:15:27.974398 4592 generic.go:334] "Generic (PLEG): container finished" podID="abd885d0-dbac-4845-8a3e-2454abf4d652" containerID="1c5ba3e2b6fa1477b8c7b882e9ddb8c714fd9045989b165b0b5aaa29fcd2d6c5" exitCode=0 Sep 29 17:15:27 crc kubenswrapper[4592]: I0929 17:15:27.974483 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" event={"ID":"abd885d0-dbac-4845-8a3e-2454abf4d652","Type":"ContainerDied","Data":"1c5ba3e2b6fa1477b8c7b882e9ddb8c714fd9045989b165b0b5aaa29fcd2d6c5"} Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.427218 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.552656 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-inventory\") pod \"abd885d0-dbac-4845-8a3e-2454abf4d652\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.552841 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-ssh-key\") pod \"abd885d0-dbac-4845-8a3e-2454abf4d652\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.552938 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-repo-setup-combined-ca-bundle\") pod \"abd885d0-dbac-4845-8a3e-2454abf4d652\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.552980 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkplc\" (UniqueName: \"kubernetes.io/projected/abd885d0-dbac-4845-8a3e-2454abf4d652-kube-api-access-wkplc\") pod \"abd885d0-dbac-4845-8a3e-2454abf4d652\" (UID: \"abd885d0-dbac-4845-8a3e-2454abf4d652\") " Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.558019 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abd885d0-dbac-4845-8a3e-2454abf4d652-kube-api-access-wkplc" (OuterVolumeSpecName: "kube-api-access-wkplc") pod "abd885d0-dbac-4845-8a3e-2454abf4d652" (UID: "abd885d0-dbac-4845-8a3e-2454abf4d652"). InnerVolumeSpecName "kube-api-access-wkplc". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.576346 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "abd885d0-dbac-4845-8a3e-2454abf4d652" (UID: "abd885d0-dbac-4845-8a3e-2454abf4d652"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.585917 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-inventory" (OuterVolumeSpecName: "inventory") pod "abd885d0-dbac-4845-8a3e-2454abf4d652" (UID: "abd885d0-dbac-4845-8a3e-2454abf4d652"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.587223 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "abd885d0-dbac-4845-8a3e-2454abf4d652" (UID: "abd885d0-dbac-4845-8a3e-2454abf4d652"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.655074 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.655113 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.655125 4592 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd885d0-dbac-4845-8a3e-2454abf4d652-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.655139 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkplc\" (UniqueName: \"kubernetes.io/projected/abd885d0-dbac-4845-8a3e-2454abf4d652-kube-api-access-wkplc\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.996220 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" event={"ID":"abd885d0-dbac-4845-8a3e-2454abf4d652","Type":"ContainerDied","Data":"3d65334a484ffbe661883f11cb3c58fe9372899be69fc739e5316dc546ccd9d0"} Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.996265 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d65334a484ffbe661883f11cb3c58fe9372899be69fc739e5316dc546ccd9d0" Sep 29 17:15:29 crc kubenswrapper[4592]: I0929 17:15:29.996631 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.080584 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz"] Sep 29 17:15:30 crc kubenswrapper[4592]: E0929 17:15:30.080947 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd885d0-dbac-4845-8a3e-2454abf4d652" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.080968 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd885d0-dbac-4845-8a3e-2454abf4d652" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.081188 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd885d0-dbac-4845-8a3e-2454abf4d652" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.081785 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.086746 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.086924 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.086971 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.087138 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.104195 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz"] Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.277421 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/894df7ab-ced1-483a-98a5-2e7e496f1578-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gqflz\" (UID: \"894df7ab-ced1-483a-98a5-2e7e496f1578\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.277476 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dw44\" (UniqueName: \"kubernetes.io/projected/894df7ab-ced1-483a-98a5-2e7e496f1578-kube-api-access-2dw44\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gqflz\" (UID: \"894df7ab-ced1-483a-98a5-2e7e496f1578\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.277686 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/894df7ab-ced1-483a-98a5-2e7e496f1578-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gqflz\" (UID: \"894df7ab-ced1-483a-98a5-2e7e496f1578\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.378895 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/894df7ab-ced1-483a-98a5-2e7e496f1578-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gqflz\" (UID: \"894df7ab-ced1-483a-98a5-2e7e496f1578\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.378979 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dw44\" (UniqueName: \"kubernetes.io/projected/894df7ab-ced1-483a-98a5-2e7e496f1578-kube-api-access-2dw44\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gqflz\" (UID: \"894df7ab-ced1-483a-98a5-2e7e496f1578\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.379055 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/894df7ab-ced1-483a-98a5-2e7e496f1578-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gqflz\" (UID: \"894df7ab-ced1-483a-98a5-2e7e496f1578\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.384624 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/894df7ab-ced1-483a-98a5-2e7e496f1578-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gqflz\" (UID: \"894df7ab-ced1-483a-98a5-2e7e496f1578\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.387884 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/894df7ab-ced1-483a-98a5-2e7e496f1578-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gqflz\" (UID: \"894df7ab-ced1-483a-98a5-2e7e496f1578\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.410838 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dw44\" (UniqueName: \"kubernetes.io/projected/894df7ab-ced1-483a-98a5-2e7e496f1578-kube-api-access-2dw44\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-gqflz\" (UID: \"894df7ab-ced1-483a-98a5-2e7e496f1578\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.695957 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.883276 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:15:30 crc kubenswrapper[4592]: I0929 17:15:30.883687 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:15:31 crc kubenswrapper[4592]: I0929 17:15:31.010551 4592 generic.go:334] "Generic (PLEG): container finished" podID="39476971-ab8d-461e-bdac-865ee86327b3" containerID="88990bf59e319d8294153ee325fa7083c040c2f68ef0954a6d98b0bf95f87ed7" exitCode=0 Sep 29 17:15:31 crc kubenswrapper[4592]: I0929 17:15:31.010598 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f6fl" event={"ID":"39476971-ab8d-461e-bdac-865ee86327b3","Type":"ContainerDied","Data":"88990bf59e319d8294153ee325fa7083c040c2f68ef0954a6d98b0bf95f87ed7"} Sep 29 17:15:31 crc kubenswrapper[4592]: I0929 17:15:31.314892 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz"] Sep 29 17:15:32 crc kubenswrapper[4592]: I0929 17:15:32.025056 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f6fl" event={"ID":"39476971-ab8d-461e-bdac-865ee86327b3","Type":"ContainerStarted","Data":"55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b"} Sep 29 17:15:32 crc kubenswrapper[4592]: I0929 17:15:32.029779 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" 
event={"ID":"894df7ab-ced1-483a-98a5-2e7e496f1578","Type":"ContainerStarted","Data":"e3e55da994991ff9d090b425fa67d36c16f724c2b9093b2edd4cfaea639f60d8"} Sep 29 17:15:32 crc kubenswrapper[4592]: I0929 17:15:32.062119 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4f6fl" podStartSLOduration=3.468990212 podStartE2EDuration="11.062044559s" podCreationTimestamp="2025-09-29 17:15:21 +0000 UTC" firstStartedPulling="2025-09-29 17:15:23.937132516 +0000 UTC m=+1454.084910197" lastFinishedPulling="2025-09-29 17:15:31.530186873 +0000 UTC m=+1461.677964544" observedRunningTime="2025-09-29 17:15:32.045547917 +0000 UTC m=+1462.193325598" watchObservedRunningTime="2025-09-29 17:15:32.062044559 +0000 UTC m=+1462.209822240" Sep 29 17:15:32 crc kubenswrapper[4592]: I0929 17:15:32.272013 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:32 crc kubenswrapper[4592]: I0929 17:15:32.272060 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:33 crc kubenswrapper[4592]: I0929 17:15:33.053638 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" event={"ID":"894df7ab-ced1-483a-98a5-2e7e496f1578","Type":"ContainerStarted","Data":"1333320363d9c14f0a0c2a2095573180a877c561c94ce85d7fd4a0fe36709feb"} Sep 29 17:15:33 crc kubenswrapper[4592]: I0929 17:15:33.077094 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" podStartSLOduration=2.458819259 podStartE2EDuration="3.077068606s" podCreationTimestamp="2025-09-29 17:15:30 +0000 UTC" firstStartedPulling="2025-09-29 17:15:31.324831468 +0000 UTC m=+1461.472609149" lastFinishedPulling="2025-09-29 17:15:31.943080785 +0000 UTC m=+1462.090858496" observedRunningTime="2025-09-29 17:15:33.074017781 +0000 UTC m=+1463.221795462" watchObservedRunningTime="2025-09-29 17:15:33.077068606 +0000 UTC m=+1463.224846307" Sep 29 17:15:33 crc kubenswrapper[4592]: I0929 17:15:33.316010 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4f6fl" podUID="39476971-ab8d-461e-bdac-865ee86327b3" containerName="registry-server" probeResult="failure" output=< Sep 29 17:15:33 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s Sep 29 17:15:33 crc kubenswrapper[4592]: > Sep 29 17:15:36 crc kubenswrapper[4592]: I0929 17:15:36.086957 4592 generic.go:334] "Generic (PLEG): container finished" podID="894df7ab-ced1-483a-98a5-2e7e496f1578" containerID="1333320363d9c14f0a0c2a2095573180a877c561c94ce85d7fd4a0fe36709feb" exitCode=0 Sep 29 17:15:36 crc kubenswrapper[4592]: I0929 17:15:36.087077 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" event={"ID":"894df7ab-ced1-483a-98a5-2e7e496f1578","Type":"ContainerDied","Data":"1333320363d9c14f0a0c2a2095573180a877c561c94ce85d7fd4a0fe36709feb"} Sep 29 17:15:37 crc kubenswrapper[4592]: I0929 17:15:37.517536 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:37 crc kubenswrapper[4592]: I0929 17:15:37.542094 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dw44\" (UniqueName: \"kubernetes.io/projected/894df7ab-ced1-483a-98a5-2e7e496f1578-kube-api-access-2dw44\") pod \"894df7ab-ced1-483a-98a5-2e7e496f1578\" (UID: \"894df7ab-ced1-483a-98a5-2e7e496f1578\") " Sep 29 17:15:37 crc kubenswrapper[4592]: I0929 17:15:37.542584 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/894df7ab-ced1-483a-98a5-2e7e496f1578-inventory\") pod \"894df7ab-ced1-483a-98a5-2e7e496f1578\" (UID: \"894df7ab-ced1-483a-98a5-2e7e496f1578\") " Sep 29 17:15:37 crc kubenswrapper[4592]: I0929 17:15:37.542617 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/894df7ab-ced1-483a-98a5-2e7e496f1578-ssh-key\") pod \"894df7ab-ced1-483a-98a5-2e7e496f1578\" (UID: \"894df7ab-ced1-483a-98a5-2e7e496f1578\") " Sep 29 17:15:37 crc kubenswrapper[4592]: I0929 17:15:37.549717 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/894df7ab-ced1-483a-98a5-2e7e496f1578-kube-api-access-2dw44" (OuterVolumeSpecName: "kube-api-access-2dw44") pod "894df7ab-ced1-483a-98a5-2e7e496f1578" (UID: "894df7ab-ced1-483a-98a5-2e7e496f1578"). InnerVolumeSpecName "kube-api-access-2dw44". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:15:37 crc kubenswrapper[4592]: I0929 17:15:37.573837 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894df7ab-ced1-483a-98a5-2e7e496f1578-inventory" (OuterVolumeSpecName: "inventory") pod "894df7ab-ced1-483a-98a5-2e7e496f1578" (UID: "894df7ab-ced1-483a-98a5-2e7e496f1578"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:15:37 crc kubenswrapper[4592]: I0929 17:15:37.583997 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/894df7ab-ced1-483a-98a5-2e7e496f1578-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "894df7ab-ced1-483a-98a5-2e7e496f1578" (UID: "894df7ab-ced1-483a-98a5-2e7e496f1578"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:15:37 crc kubenswrapper[4592]: I0929 17:15:37.644752 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dw44\" (UniqueName: \"kubernetes.io/projected/894df7ab-ced1-483a-98a5-2e7e496f1578-kube-api-access-2dw44\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:37 crc kubenswrapper[4592]: I0929 17:15:37.644796 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/894df7ab-ced1-483a-98a5-2e7e496f1578-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:37 crc kubenswrapper[4592]: I0929 17:15:37.644810 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/894df7ab-ced1-483a-98a5-2e7e496f1578-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.105666 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" event={"ID":"894df7ab-ced1-483a-98a5-2e7e496f1578","Type":"ContainerDied","Data":"e3e55da994991ff9d090b425fa67d36c16f724c2b9093b2edd4cfaea639f60d8"} Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.105717 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3e55da994991ff9d090b425fa67d36c16f724c2b9093b2edd4cfaea639f60d8" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.105757 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-gqflz" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.186577 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd"] Sep 29 17:15:38 crc kubenswrapper[4592]: E0929 17:15:38.186955 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894df7ab-ced1-483a-98a5-2e7e496f1578" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.186974 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="894df7ab-ced1-483a-98a5-2e7e496f1578" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.187386 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="894df7ab-ced1-483a-98a5-2e7e496f1578" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.188109 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.197221 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.197297 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.197979 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.200374 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.205458 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd"] Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.254294 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8rz2\" (UniqueName: \"kubernetes.io/projected/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-kube-api-access-b8rz2\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.254345 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.254421 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.254512 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.356230 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8rz2\" (UniqueName: \"kubernetes.io/projected/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-kube-api-access-b8rz2\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.356861 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-ssh-key\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.357023 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.357253 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.361934 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.362101 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.363699 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.378374 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8rz2\" (UniqueName: \"kubernetes.io/projected/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-kube-api-access-b8rz2\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:38 crc kubenswrapper[4592]: I0929 17:15:38.504500 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:15:39 crc kubenswrapper[4592]: I0929 17:15:39.037695 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd"] Sep 29 17:15:39 crc kubenswrapper[4592]: I0929 17:15:39.117327 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" event={"ID":"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48","Type":"ContainerStarted","Data":"6628c7d8436bd812b90825d4204027da31a4a706e3e91c9627843ad12675c07c"} Sep 29 17:15:40 crc kubenswrapper[4592]: I0929 17:15:40.126674 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" event={"ID":"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48","Type":"ContainerStarted","Data":"db5c146b1043a1e0f01ad789eb1c8ea8352e855f52d76ccc0dfcff255f4bbee6"} Sep 29 17:15:40 crc kubenswrapper[4592]: I0929 17:15:40.144672 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" podStartSLOduration=1.718340773 podStartE2EDuration="2.144654362s" podCreationTimestamp="2025-09-29 17:15:38 +0000 UTC" firstStartedPulling="2025-09-29 17:15:39.051244375 +0000 UTC m=+1469.199022056" lastFinishedPulling="2025-09-29 17:15:39.477557924 +0000 UTC m=+1469.625335645" observedRunningTime="2025-09-29 17:15:40.140743585 +0000 UTC m=+1470.288521276" watchObservedRunningTime="2025-09-29 17:15:40.144654362 +0000 UTC m=+1470.292432043" Sep 29 17:15:43 crc kubenswrapper[4592]: I0929 17:15:43.322960 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4f6fl" podUID="39476971-ab8d-461e-bdac-865ee86327b3" containerName="registry-server" probeResult="failure" output=< Sep 29 17:15:43 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s Sep 29 17:15:43 crc kubenswrapper[4592]: > Sep 29 17:15:52 crc kubenswrapper[4592]: I0929 17:15:52.325348 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:52 crc kubenswrapper[4592]: I0929 17:15:52.378405 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:53 crc kubenswrapper[4592]: I0929 17:15:53.143105 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4f6fl"] Sep 29 17:15:54 crc kubenswrapper[4592]: I0929 17:15:54.241309 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4f6fl" podUID="39476971-ab8d-461e-bdac-865ee86327b3" containerName="registry-server" containerID="cri-o://55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b" gracePeriod=2 Sep 29 17:15:54 crc kubenswrapper[4592]: I0929 17:15:54.656650 4592 util.go:48] "No ready sandbox for pod can be found. 
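[Editor's note] "Killing container with a grace period ... gracePeriod=2" above is the standard stop sequence the kubelet requests from the runtime (CRI-O here): signal the container to terminate, wait up to the grace period, then force-kill. A minimal process-level Go sketch of that contract, under the assumption that a plain OS process stands in for the container:

    package main

    import (
        "os"
        "os/exec"
        "syscall"
        "time"
    )

    // stopWithGrace mirrors the kill-with-grace-period contract: SIGTERM first,
    // escalate to SIGKILL only if the process outlives the grace period.
    func stopWithGrace(p *os.Process, grace time.Duration, exited <-chan struct{}) {
        _ = p.Signal(syscall.SIGTERM)
        select {
        case <-exited: // clean exit within the grace period
        case <-time.After(grace):
            _ = p.Signal(syscall.SIGKILL)
        }
    }

    func main() {
        cmd := exec.Command("sleep", "60") // stand-in workload
        _ = cmd.Start()
        exited := make(chan struct{})
        go func() { _ = cmd.Wait(); close(exited) }()
        stopWithGrace(cmd.Process, 2*time.Second, exited) // gracePeriod=2, as in the log
    }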
Need to start a new one" pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:54 crc kubenswrapper[4592]: I0929 17:15:54.758412 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39476971-ab8d-461e-bdac-865ee86327b3-utilities\") pod \"39476971-ab8d-461e-bdac-865ee86327b3\" (UID: \"39476971-ab8d-461e-bdac-865ee86327b3\") " Sep 29 17:15:54 crc kubenswrapper[4592]: I0929 17:15:54.758510 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39476971-ab8d-461e-bdac-865ee86327b3-catalog-content\") pod \"39476971-ab8d-461e-bdac-865ee86327b3\" (UID: \"39476971-ab8d-461e-bdac-865ee86327b3\") " Sep 29 17:15:54 crc kubenswrapper[4592]: I0929 17:15:54.758640 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hx648\" (UniqueName: \"kubernetes.io/projected/39476971-ab8d-461e-bdac-865ee86327b3-kube-api-access-hx648\") pod \"39476971-ab8d-461e-bdac-865ee86327b3\" (UID: \"39476971-ab8d-461e-bdac-865ee86327b3\") " Sep 29 17:15:54 crc kubenswrapper[4592]: I0929 17:15:54.759260 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39476971-ab8d-461e-bdac-865ee86327b3-utilities" (OuterVolumeSpecName: "utilities") pod "39476971-ab8d-461e-bdac-865ee86327b3" (UID: "39476971-ab8d-461e-bdac-865ee86327b3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:15:54 crc kubenswrapper[4592]: I0929 17:15:54.765520 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39476971-ab8d-461e-bdac-865ee86327b3-kube-api-access-hx648" (OuterVolumeSpecName: "kube-api-access-hx648") pod "39476971-ab8d-461e-bdac-865ee86327b3" (UID: "39476971-ab8d-461e-bdac-865ee86327b3"). InnerVolumeSpecName "kube-api-access-hx648". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:15:54 crc kubenswrapper[4592]: I0929 17:15:54.828588 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39476971-ab8d-461e-bdac-865ee86327b3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39476971-ab8d-461e-bdac-865ee86327b3" (UID: "39476971-ab8d-461e-bdac-865ee86327b3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:15:54 crc kubenswrapper[4592]: I0929 17:15:54.862113 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hx648\" (UniqueName: \"kubernetes.io/projected/39476971-ab8d-461e-bdac-865ee86327b3-kube-api-access-hx648\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:54 crc kubenswrapper[4592]: I0929 17:15:54.862220 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39476971-ab8d-461e-bdac-865ee86327b3-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:54 crc kubenswrapper[4592]: I0929 17:15:54.862247 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39476971-ab8d-461e-bdac-865ee86327b3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.251874 4592 generic.go:334] "Generic (PLEG): container finished" podID="39476971-ab8d-461e-bdac-865ee86327b3" containerID="55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b" exitCode=0 Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.252192 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4f6fl" Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.252932 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f6fl" event={"ID":"39476971-ab8d-461e-bdac-865ee86327b3","Type":"ContainerDied","Data":"55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b"} Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.253047 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4f6fl" event={"ID":"39476971-ab8d-461e-bdac-865ee86327b3","Type":"ContainerDied","Data":"586f3cfdb91d0e1f1aeac5f63335f8a3b97995d1c67f7f786a10a54393d7ee30"} Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.253120 4592 scope.go:117] "RemoveContainer" containerID="55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b" Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.272629 4592 scope.go:117] "RemoveContainer" containerID="88990bf59e319d8294153ee325fa7083c040c2f68ef0954a6d98b0bf95f87ed7" Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.277042 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4f6fl"] Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.299189 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4f6fl"] Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.306025 4592 scope.go:117] "RemoveContainer" containerID="a773345fe03a3a6d623159060a200d5fcfebaeedad199a58bfa61d70a6cac151" Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.339663 4592 scope.go:117] "RemoveContainer" containerID="55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b" Sep 29 17:15:55 crc kubenswrapper[4592]: E0929 17:15:55.340026 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b\": container with ID starting with 55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b not found: ID does not exist" containerID="55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b" Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.340053 4592 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b"} err="failed to get container status \"55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b\": rpc error: code = NotFound desc = could not find container \"55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b\": container with ID starting with 55ac7a6b8dc807149b10922e53b67755eafe7ea49ba8d418fd929df5e0f2964b not found: ID does not exist" Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.340073 4592 scope.go:117] "RemoveContainer" containerID="88990bf59e319d8294153ee325fa7083c040c2f68ef0954a6d98b0bf95f87ed7" Sep 29 17:15:55 crc kubenswrapper[4592]: E0929 17:15:55.340559 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88990bf59e319d8294153ee325fa7083c040c2f68ef0954a6d98b0bf95f87ed7\": container with ID starting with 88990bf59e319d8294153ee325fa7083c040c2f68ef0954a6d98b0bf95f87ed7 not found: ID does not exist" containerID="88990bf59e319d8294153ee325fa7083c040c2f68ef0954a6d98b0bf95f87ed7" Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.340614 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88990bf59e319d8294153ee325fa7083c040c2f68ef0954a6d98b0bf95f87ed7"} err="failed to get container status \"88990bf59e319d8294153ee325fa7083c040c2f68ef0954a6d98b0bf95f87ed7\": rpc error: code = NotFound desc = could not find container \"88990bf59e319d8294153ee325fa7083c040c2f68ef0954a6d98b0bf95f87ed7\": container with ID starting with 88990bf59e319d8294153ee325fa7083c040c2f68ef0954a6d98b0bf95f87ed7 not found: ID does not exist" Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.340649 4592 scope.go:117] "RemoveContainer" containerID="a773345fe03a3a6d623159060a200d5fcfebaeedad199a58bfa61d70a6cac151" Sep 29 17:15:55 crc kubenswrapper[4592]: E0929 17:15:55.340933 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a773345fe03a3a6d623159060a200d5fcfebaeedad199a58bfa61d70a6cac151\": container with ID starting with a773345fe03a3a6d623159060a200d5fcfebaeedad199a58bfa61d70a6cac151 not found: ID does not exist" containerID="a773345fe03a3a6d623159060a200d5fcfebaeedad199a58bfa61d70a6cac151" Sep 29 17:15:55 crc kubenswrapper[4592]: I0929 17:15:55.340957 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a773345fe03a3a6d623159060a200d5fcfebaeedad199a58bfa61d70a6cac151"} err="failed to get container status \"a773345fe03a3a6d623159060a200d5fcfebaeedad199a58bfa61d70a6cac151\": rpc error: code = NotFound desc = could not find container \"a773345fe03a3a6d623159060a200d5fcfebaeedad199a58bfa61d70a6cac151\": container with ID starting with a773345fe03a3a6d623159060a200d5fcfebaeedad199a58bfa61d70a6cac151 not found: ID does not exist" Sep 29 17:15:57 crc kubenswrapper[4592]: I0929 17:15:57.193865 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39476971-ab8d-461e-bdac-865ee86327b3" path="/var/lib/kubelet/pods/39476971-ab8d-461e-bdac-865ee86327b3/volumes" Sep 29 17:16:00 crc kubenswrapper[4592]: I0929 17:16:00.883407 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
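[Editor's note] The RemoveContainer / NotFound pairs above are the benign double-delete path: the containers were already removed along with the pod, so the follow-up ContainerStatus lookups fail with gRPC NotFound and the kubelet merely logs the error and moves on. A Go sketch of treating NotFound as "already deleted" so cleanup stays idempotent (it uses the real google.golang.org/grpc status and codes packages; the removeContainer helper name is an assumption, not kubelet source):

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer treats a NotFound status as success: the container is
    // already gone, so there is nothing left to clean up.
    func removeContainer(err error) error {
        if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
            fmt.Println("container already removed; ignoring")
            return nil
        }
        return err
    }

    func main() {
        err := status.Error(codes.NotFound, "could not find container <id>")
        fmt.Println(removeContainer(err)) // <nil>
    }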
Sep 29 17:16:00 crc kubenswrapper[4592]: I0929 17:16:00.883407 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 17:16:00 crc kubenswrapper[4592]: I0929 17:16:00.883461 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 17:16:00 crc kubenswrapper[4592]: I0929 17:16:00.883499 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 17:16:00 crc kubenswrapper[4592]: I0929 17:16:00.884421 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6ba93e7083930b491deeabf3f5e5e00bbbacfa31695ad5f283ea89667e717859"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 17:16:00 crc kubenswrapper[4592]: I0929 17:16:00.884464 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://6ba93e7083930b491deeabf3f5e5e00bbbacfa31695ad5f283ea89667e717859" gracePeriod=600
Sep 29 17:16:01 crc kubenswrapper[4592]: I0929 17:16:01.314260 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="6ba93e7083930b491deeabf3f5e5e00bbbacfa31695ad5f283ea89667e717859" exitCode=0
Sep 29 17:16:01 crc kubenswrapper[4592]: I0929 17:16:01.314448 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"6ba93e7083930b491deeabf3f5e5e00bbbacfa31695ad5f283ea89667e717859"}
Sep 29 17:16:01 crc kubenswrapper[4592]: I0929 17:16:01.314588 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5"}
Sep 29 17:16:01 crc kubenswrapper[4592]: I0929 17:16:01.314614 4592 scope.go:117] "RemoveContainer" containerID="eda311cdba216e737acbcd0597b515cd95b73924e8324b693474a342758766fb"
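[Editor's note] The sequence above is a complete liveness-driven restart: the HTTP GET against http://127.0.0.1:8798/health is refused, the probe result is failure, the kubelet records "failed liveness probe, will be restarted", kills the container with its 600 s grace period, and a replacement starts. A minimal Go sketch of the check itself (illustrative, not the kubelet's prober; the healthy helper is an assumption):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // healthy performs the same kind of HTTP GET the liveness probe issues;
    // a connection refusal or an error status counts as a failure.
    func healthy(url string) bool {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get(url)
        if err != nil {
            fmt.Printf("Probe failed: %v\n", err) // e.g. "connect: connection refused"
            return false
        }
        defer resp.Body.Close()
        return resp.StatusCode >= 200 && resp.StatusCode < 400
    }

    func main() {
        healthy("http://127.0.0.1:8798/health")
    }

Once failures accumulate past the probe's failure threshold, the kubelet marks the container unhealthy and initiates the restart seen in the log.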
containerName="extract-utilities" Sep 29 17:16:10 crc kubenswrapper[4592]: E0929 17:16:10.804309 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39476971-ab8d-461e-bdac-865ee86327b3" containerName="registry-server" Sep 29 17:16:10 crc kubenswrapper[4592]: I0929 17:16:10.804317 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="39476971-ab8d-461e-bdac-865ee86327b3" containerName="registry-server" Sep 29 17:16:10 crc kubenswrapper[4592]: I0929 17:16:10.804560 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="39476971-ab8d-461e-bdac-865ee86327b3" containerName="registry-server" Sep 29 17:16:10 crc kubenswrapper[4592]: I0929 17:16:10.806290 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:10 crc kubenswrapper[4592]: I0929 17:16:10.818337 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwf82"] Sep 29 17:16:10 crc kubenswrapper[4592]: I0929 17:16:10.897844 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zslq7\" (UniqueName: \"kubernetes.io/projected/dfe56d69-5754-4249-b98d-4c009b3023bb-kube-api-access-zslq7\") pod \"redhat-marketplace-pwf82\" (UID: \"dfe56d69-5754-4249-b98d-4c009b3023bb\") " pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:10 crc kubenswrapper[4592]: I0929 17:16:10.897967 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe56d69-5754-4249-b98d-4c009b3023bb-utilities\") pod \"redhat-marketplace-pwf82\" (UID: \"dfe56d69-5754-4249-b98d-4c009b3023bb\") " pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:10 crc kubenswrapper[4592]: I0929 17:16:10.898034 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe56d69-5754-4249-b98d-4c009b3023bb-catalog-content\") pod \"redhat-marketplace-pwf82\" (UID: \"dfe56d69-5754-4249-b98d-4c009b3023bb\") " pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:10 crc kubenswrapper[4592]: I0929 17:16:10.999334 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe56d69-5754-4249-b98d-4c009b3023bb-catalog-content\") pod \"redhat-marketplace-pwf82\" (UID: \"dfe56d69-5754-4249-b98d-4c009b3023bb\") " pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:10 crc kubenswrapper[4592]: I0929 17:16:10.999441 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zslq7\" (UniqueName: \"kubernetes.io/projected/dfe56d69-5754-4249-b98d-4c009b3023bb-kube-api-access-zslq7\") pod \"redhat-marketplace-pwf82\" (UID: \"dfe56d69-5754-4249-b98d-4c009b3023bb\") " pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:10 crc kubenswrapper[4592]: I0929 17:16:10.999508 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe56d69-5754-4249-b98d-4c009b3023bb-utilities\") pod \"redhat-marketplace-pwf82\" (UID: \"dfe56d69-5754-4249-b98d-4c009b3023bb\") " pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:10 crc kubenswrapper[4592]: I0929 17:16:10.999971 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe56d69-5754-4249-b98d-4c009b3023bb-utilities\") pod \"redhat-marketplace-pwf82\" (UID: \"dfe56d69-5754-4249-b98d-4c009b3023bb\") " pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:11 crc kubenswrapper[4592]: I0929 17:16:11.000200 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe56d69-5754-4249-b98d-4c009b3023bb-catalog-content\") pod \"redhat-marketplace-pwf82\" (UID: \"dfe56d69-5754-4249-b98d-4c009b3023bb\") " pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:11 crc kubenswrapper[4592]: I0929 17:16:11.050985 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zslq7\" (UniqueName: \"kubernetes.io/projected/dfe56d69-5754-4249-b98d-4c009b3023bb-kube-api-access-zslq7\") pod \"redhat-marketplace-pwf82\" (UID: \"dfe56d69-5754-4249-b98d-4c009b3023bb\") " pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:11 crc kubenswrapper[4592]: I0929 17:16:11.132563 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:11 crc kubenswrapper[4592]: I0929 17:16:11.640421 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwf82"] Sep 29 17:16:12 crc kubenswrapper[4592]: I0929 17:16:12.412667 4592 generic.go:334] "Generic (PLEG): container finished" podID="dfe56d69-5754-4249-b98d-4c009b3023bb" containerID="3da41cd403cbc4b21758f94de05eae91c959560087a63a1f17c75a310e8fcc19" exitCode=0 Sep 29 17:16:12 crc kubenswrapper[4592]: I0929 17:16:12.413016 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwf82" event={"ID":"dfe56d69-5754-4249-b98d-4c009b3023bb","Type":"ContainerDied","Data":"3da41cd403cbc4b21758f94de05eae91c959560087a63a1f17c75a310e8fcc19"} Sep 29 17:16:12 crc kubenswrapper[4592]: I0929 17:16:12.413040 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwf82" event={"ID":"dfe56d69-5754-4249-b98d-4c009b3023bb","Type":"ContainerStarted","Data":"0f281aee68044721f2e7a2826801f60e40512e5137315a14856b632f41f34405"} Sep 29 17:16:14 crc kubenswrapper[4592]: I0929 17:16:14.430948 4592 generic.go:334] "Generic (PLEG): container finished" podID="dfe56d69-5754-4249-b98d-4c009b3023bb" containerID="c400220a509c3d9eab1dc89dafaabe44d08e467d275f33987b1625dd6e6db5c8" exitCode=0 Sep 29 17:16:14 crc kubenswrapper[4592]: I0929 17:16:14.431077 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwf82" event={"ID":"dfe56d69-5754-4249-b98d-4c009b3023bb","Type":"ContainerDied","Data":"c400220a509c3d9eab1dc89dafaabe44d08e467d275f33987b1625dd6e6db5c8"} Sep 29 17:16:15 crc kubenswrapper[4592]: I0929 17:16:15.440430 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwf82" event={"ID":"dfe56d69-5754-4249-b98d-4c009b3023bb","Type":"ContainerStarted","Data":"d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a"} Sep 29 17:16:15 crc kubenswrapper[4592]: I0929 17:16:15.459145 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pwf82" podStartSLOduration=2.874738872 podStartE2EDuration="5.459127739s" podCreationTimestamp="2025-09-29 17:16:10 +0000 UTC" firstStartedPulling="2025-09-29 17:16:12.41501853 
+0000 UTC m=+1502.562835442" lastFinishedPulling="2025-09-29 17:16:14.999446628 +0000 UTC m=+1505.147224309" observedRunningTime="2025-09-29 17:16:15.456476355 +0000 UTC m=+1505.604254036" watchObservedRunningTime="2025-09-29 17:16:15.459127739 +0000 UTC m=+1505.606905420" Sep 29 17:16:19 crc kubenswrapper[4592]: I0929 17:16:19.942582 4592 scope.go:117] "RemoveContainer" containerID="4789bd247a0de604482b1c428a991531f2f398fbb3f663d37b9800f4cda91f08" Sep 29 17:16:21 crc kubenswrapper[4592]: I0929 17:16:21.133537 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:21 crc kubenswrapper[4592]: I0929 17:16:21.134028 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:21 crc kubenswrapper[4592]: I0929 17:16:21.204870 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:21 crc kubenswrapper[4592]: I0929 17:16:21.561335 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:21 crc kubenswrapper[4592]: I0929 17:16:21.617681 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwf82"] Sep 29 17:16:23 crc kubenswrapper[4592]: I0929 17:16:23.520256 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pwf82" podUID="dfe56d69-5754-4249-b98d-4c009b3023bb" containerName="registry-server" containerID="cri-o://d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a" gracePeriod=2 Sep 29 17:16:23 crc kubenswrapper[4592]: I0929 17:16:23.959912 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.047463 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zslq7\" (UniqueName: \"kubernetes.io/projected/dfe56d69-5754-4249-b98d-4c009b3023bb-kube-api-access-zslq7\") pod \"dfe56d69-5754-4249-b98d-4c009b3023bb\" (UID: \"dfe56d69-5754-4249-b98d-4c009b3023bb\") " Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.047732 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe56d69-5754-4249-b98d-4c009b3023bb-catalog-content\") pod \"dfe56d69-5754-4249-b98d-4c009b3023bb\" (UID: \"dfe56d69-5754-4249-b98d-4c009b3023bb\") " Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.047772 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe56d69-5754-4249-b98d-4c009b3023bb-utilities\") pod \"dfe56d69-5754-4249-b98d-4c009b3023bb\" (UID: \"dfe56d69-5754-4249-b98d-4c009b3023bb\") " Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.049506 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfe56d69-5754-4249-b98d-4c009b3023bb-utilities" (OuterVolumeSpecName: "utilities") pod "dfe56d69-5754-4249-b98d-4c009b3023bb" (UID: "dfe56d69-5754-4249-b98d-4c009b3023bb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.052808 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfe56d69-5754-4249-b98d-4c009b3023bb-kube-api-access-zslq7" (OuterVolumeSpecName: "kube-api-access-zslq7") pod "dfe56d69-5754-4249-b98d-4c009b3023bb" (UID: "dfe56d69-5754-4249-b98d-4c009b3023bb"). InnerVolumeSpecName "kube-api-access-zslq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.064257 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfe56d69-5754-4249-b98d-4c009b3023bb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dfe56d69-5754-4249-b98d-4c009b3023bb" (UID: "dfe56d69-5754-4249-b98d-4c009b3023bb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.150356 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfe56d69-5754-4249-b98d-4c009b3023bb-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.150394 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zslq7\" (UniqueName: \"kubernetes.io/projected/dfe56d69-5754-4249-b98d-4c009b3023bb-kube-api-access-zslq7\") on node \"crc\" DevicePath \"\"" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.150404 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfe56d69-5754-4249-b98d-4c009b3023bb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.533359 4592 generic.go:334] "Generic (PLEG): container finished" podID="dfe56d69-5754-4249-b98d-4c009b3023bb" containerID="d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a" exitCode=0 Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.533407 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwf82" event={"ID":"dfe56d69-5754-4249-b98d-4c009b3023bb","Type":"ContainerDied","Data":"d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a"} Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.533438 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwf82" event={"ID":"dfe56d69-5754-4249-b98d-4c009b3023bb","Type":"ContainerDied","Data":"0f281aee68044721f2e7a2826801f60e40512e5137315a14856b632f41f34405"} Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.533458 4592 scope.go:117] "RemoveContainer" containerID="d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.533498 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwf82" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.557743 4592 scope.go:117] "RemoveContainer" containerID="c400220a509c3d9eab1dc89dafaabe44d08e467d275f33987b1625dd6e6db5c8" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.572752 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwf82"] Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.608589 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwf82"] Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.609664 4592 scope.go:117] "RemoveContainer" containerID="3da41cd403cbc4b21758f94de05eae91c959560087a63a1f17c75a310e8fcc19" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.635885 4592 scope.go:117] "RemoveContainer" containerID="d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a" Sep 29 17:16:24 crc kubenswrapper[4592]: E0929 17:16:24.636474 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a\": container with ID starting with d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a not found: ID does not exist" containerID="d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.636502 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a"} err="failed to get container status \"d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a\": rpc error: code = NotFound desc = could not find container \"d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a\": container with ID starting with d057efc5dc0e29d0b5d0052833a82fa0d57d72a336b4a4929853e2b176ffac3a not found: ID does not exist" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.636523 4592 scope.go:117] "RemoveContainer" containerID="c400220a509c3d9eab1dc89dafaabe44d08e467d275f33987b1625dd6e6db5c8" Sep 29 17:16:24 crc kubenswrapper[4592]: E0929 17:16:24.636794 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c400220a509c3d9eab1dc89dafaabe44d08e467d275f33987b1625dd6e6db5c8\": container with ID starting with c400220a509c3d9eab1dc89dafaabe44d08e467d275f33987b1625dd6e6db5c8 not found: ID does not exist" containerID="c400220a509c3d9eab1dc89dafaabe44d08e467d275f33987b1625dd6e6db5c8" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.636810 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c400220a509c3d9eab1dc89dafaabe44d08e467d275f33987b1625dd6e6db5c8"} err="failed to get container status \"c400220a509c3d9eab1dc89dafaabe44d08e467d275f33987b1625dd6e6db5c8\": rpc error: code = NotFound desc = could not find container \"c400220a509c3d9eab1dc89dafaabe44d08e467d275f33987b1625dd6e6db5c8\": container with ID starting with c400220a509c3d9eab1dc89dafaabe44d08e467d275f33987b1625dd6e6db5c8 not found: ID does not exist" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.636840 4592 scope.go:117] "RemoveContainer" containerID="3da41cd403cbc4b21758f94de05eae91c959560087a63a1f17c75a310e8fcc19" Sep 29 17:16:24 crc kubenswrapper[4592]: E0929 17:16:24.637100 4592 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3da41cd403cbc4b21758f94de05eae91c959560087a63a1f17c75a310e8fcc19\": container with ID starting with 3da41cd403cbc4b21758f94de05eae91c959560087a63a1f17c75a310e8fcc19 not found: ID does not exist" containerID="3da41cd403cbc4b21758f94de05eae91c959560087a63a1f17c75a310e8fcc19" Sep 29 17:16:24 crc kubenswrapper[4592]: I0929 17:16:24.637116 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3da41cd403cbc4b21758f94de05eae91c959560087a63a1f17c75a310e8fcc19"} err="failed to get container status \"3da41cd403cbc4b21758f94de05eae91c959560087a63a1f17c75a310e8fcc19\": rpc error: code = NotFound desc = could not find container \"3da41cd403cbc4b21758f94de05eae91c959560087a63a1f17c75a310e8fcc19\": container with ID starting with 3da41cd403cbc4b21758f94de05eae91c959560087a63a1f17c75a310e8fcc19 not found: ID does not exist" Sep 29 17:16:25 crc kubenswrapper[4592]: I0929 17:16:25.195099 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfe56d69-5754-4249-b98d-4c009b3023bb" path="/var/lib/kubelet/pods/dfe56d69-5754-4249-b98d-4c009b3023bb/volumes" Sep 29 17:18:30 crc kubenswrapper[4592]: I0929 17:18:30.883268 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:18:30 crc kubenswrapper[4592]: I0929 17:18:30.883996 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:18:35 crc kubenswrapper[4592]: I0929 17:18:35.044466 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-26t8s"] Sep 29 17:18:35 crc kubenswrapper[4592]: I0929 17:18:35.059131 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-clmzt"] Sep 29 17:18:35 crc kubenswrapper[4592]: I0929 17:18:35.069055 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-clmzt"] Sep 29 17:18:35 crc kubenswrapper[4592]: I0929 17:18:35.076117 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-26t8s"] Sep 29 17:18:35 crc kubenswrapper[4592]: I0929 17:18:35.200746 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="784e7ce0-88bf-4f62-a9f2-945d9130750a" path="/var/lib/kubelet/pods/784e7ce0-88bf-4f62-a9f2-945d9130750a/volumes" Sep 29 17:18:35 crc kubenswrapper[4592]: I0929 17:18:35.202495 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6fab31a-6011-49cd-8191-0da215d37ed5" path="/var/lib/kubelet/pods/b6fab31a-6011-49cd-8191-0da215d37ed5/volumes" Sep 29 17:18:40 crc kubenswrapper[4592]: I0929 17:18:40.027295 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-mswng"] Sep 29 17:18:40 crc kubenswrapper[4592]: I0929 17:18:40.039491 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-mswng"] Sep 29 17:18:41 crc kubenswrapper[4592]: I0929 17:18:41.194661 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="2f9d25df-ce35-40af-999f-f80d6178e7c2" path="/var/lib/kubelet/pods/2f9d25df-ce35-40af-999f-f80d6178e7c2/volumes" Sep 29 17:18:45 crc kubenswrapper[4592]: I0929 17:18:45.028882 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-e597-account-create-d9sx7"] Sep 29 17:18:45 crc kubenswrapper[4592]: I0929 17:18:45.037341 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-9a08-account-create-b4zwb"] Sep 29 17:18:45 crc kubenswrapper[4592]: I0929 17:18:45.044834 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-e597-account-create-d9sx7"] Sep 29 17:18:45 crc kubenswrapper[4592]: I0929 17:18:45.052087 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-9a08-account-create-b4zwb"] Sep 29 17:18:45 crc kubenswrapper[4592]: I0929 17:18:45.194760 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4906ee82-cf4d-4ee7-9bba-d3953ab8ee23" path="/var/lib/kubelet/pods/4906ee82-cf4d-4ee7-9bba-d3953ab8ee23/volumes" Sep 29 17:18:45 crc kubenswrapper[4592]: I0929 17:18:45.196883 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daf5a84e-448e-458b-add1-59b8bf1bfc30" path="/var/lib/kubelet/pods/daf5a84e-448e-458b-add1-59b8bf1bfc30/volumes" Sep 29 17:18:52 crc kubenswrapper[4592]: I0929 17:18:52.038902 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-415a-account-create-8r6tt"] Sep 29 17:18:52 crc kubenswrapper[4592]: I0929 17:18:52.056606 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-415a-account-create-8r6tt"] Sep 29 17:18:53 crc kubenswrapper[4592]: I0929 17:18:53.195251 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64a1fa2f-51dd-4755-8823-93be4cbbf71c" path="/var/lib/kubelet/pods/64a1fa2f-51dd-4755-8823-93be4cbbf71c/volumes" Sep 29 17:19:00 crc kubenswrapper[4592]: I0929 17:19:00.883674 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:19:00 crc kubenswrapper[4592]: I0929 17:19:00.884362 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:19:01 crc kubenswrapper[4592]: I0929 17:19:01.051533 4592 generic.go:334] "Generic (PLEG): container finished" podID="5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48" containerID="db5c146b1043a1e0f01ad789eb1c8ea8352e855f52d76ccc0dfcff255f4bbee6" exitCode=0 Sep 29 17:19:01 crc kubenswrapper[4592]: I0929 17:19:01.051616 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" event={"ID":"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48","Type":"ContainerDied","Data":"db5c146b1043a1e0f01ad789eb1c8ea8352e855f52d76ccc0dfcff255f4bbee6"} Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.475032 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.493780 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-inventory\") pod \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.493850 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-ssh-key\") pod \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.493889 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8rz2\" (UniqueName: \"kubernetes.io/projected/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-kube-api-access-b8rz2\") pod \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.493963 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-bootstrap-combined-ca-bundle\") pod \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\" (UID: \"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48\") " Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.500650 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48" (UID: "5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.503469 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-kube-api-access-b8rz2" (OuterVolumeSpecName: "kube-api-access-b8rz2") pod "5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48" (UID: "5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48"). InnerVolumeSpecName "kube-api-access-b8rz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.525301 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48" (UID: "5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.528922 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-inventory" (OuterVolumeSpecName: "inventory") pod "5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48" (UID: "5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.596389 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.596421 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.596433 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8rz2\" (UniqueName: \"kubernetes.io/projected/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-kube-api-access-b8rz2\") on node \"crc\" DevicePath \"\"" Sep 29 17:19:02 crc kubenswrapper[4592]: I0929 17:19:02.596449 4592 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.038105 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-h9l7h"] Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.048874 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-kp4jw"] Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.061328 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-pnxjz"] Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.070650 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-h9l7h"] Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.072977 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" event={"ID":"5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48","Type":"ContainerDied","Data":"6628c7d8436bd812b90825d4204027da31a4a706e3e91c9627843ad12675c07c"} Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.073011 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6628c7d8436bd812b90825d4204027da31a4a706e3e91c9627843ad12675c07c" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.073022 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.080997 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-pnxjz"] Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.091295 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-kp4jw"] Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.164493 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s"] Sep 29 17:19:03 crc kubenswrapper[4592]: E0929 17:19:03.164953 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.164978 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 17:19:03 crc kubenswrapper[4592]: E0929 17:19:03.165009 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe56d69-5754-4249-b98d-4c009b3023bb" containerName="extract-utilities" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.165017 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe56d69-5754-4249-b98d-4c009b3023bb" containerName="extract-utilities" Sep 29 17:19:03 crc kubenswrapper[4592]: E0929 17:19:03.165046 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe56d69-5754-4249-b98d-4c009b3023bb" containerName="registry-server" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.165053 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe56d69-5754-4249-b98d-4c009b3023bb" containerName="registry-server" Sep 29 17:19:03 crc kubenswrapper[4592]: E0929 17:19:03.165074 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe56d69-5754-4249-b98d-4c009b3023bb" containerName="extract-content" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.165082 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe56d69-5754-4249-b98d-4c009b3023bb" containerName="extract-content" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.165418 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfe56d69-5754-4249-b98d-4c009b3023bb" containerName="registry-server" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.165442 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.166299 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.168356 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.168408 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.168627 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.174290 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.195070 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0be0b236-d9e2-4cc1-88dd-264436334ae2" path="/var/lib/kubelet/pods/0be0b236-d9e2-4cc1-88dd-264436334ae2/volumes" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.200568 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e5e30f6-50a2-4615-97a4-8e666b41c54a" path="/var/lib/kubelet/pods/2e5e30f6-50a2-4615-97a4-8e666b41c54a/volumes" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.202421 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3797e303-3118-4d4b-a6da-78e5737d8fcc" path="/var/lib/kubelet/pods/3797e303-3118-4d4b-a6da-78e5737d8fcc/volumes" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.204096 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s"] Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.209135 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0275d99-00b1-4174-ab01-598af7ed19b7-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s\" (UID: \"b0275d99-00b1-4174-ab01-598af7ed19b7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.209268 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5r6z\" (UniqueName: \"kubernetes.io/projected/b0275d99-00b1-4174-ab01-598af7ed19b7-kube-api-access-g5r6z\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s\" (UID: \"b0275d99-00b1-4174-ab01-598af7ed19b7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.209430 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0275d99-00b1-4174-ab01-598af7ed19b7-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s\" (UID: \"b0275d99-00b1-4174-ab01-598af7ed19b7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.310656 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0275d99-00b1-4174-ab01-598af7ed19b7-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s\" (UID: \"b0275d99-00b1-4174-ab01-598af7ed19b7\") " 
pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.310750 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0275d99-00b1-4174-ab01-598af7ed19b7-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s\" (UID: \"b0275d99-00b1-4174-ab01-598af7ed19b7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.310793 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5r6z\" (UniqueName: \"kubernetes.io/projected/b0275d99-00b1-4174-ab01-598af7ed19b7-kube-api-access-g5r6z\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s\" (UID: \"b0275d99-00b1-4174-ab01-598af7ed19b7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.318259 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0275d99-00b1-4174-ab01-598af7ed19b7-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s\" (UID: \"b0275d99-00b1-4174-ab01-598af7ed19b7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.319124 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0275d99-00b1-4174-ab01-598af7ed19b7-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s\" (UID: \"b0275d99-00b1-4174-ab01-598af7ed19b7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.332760 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5r6z\" (UniqueName: \"kubernetes.io/projected/b0275d99-00b1-4174-ab01-598af7ed19b7-kube-api-access-g5r6z\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s\" (UID: \"b0275d99-00b1-4174-ab01-598af7ed19b7\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:19:03 crc kubenswrapper[4592]: I0929 17:19:03.491501 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:19:04 crc kubenswrapper[4592]: I0929 17:19:04.029133 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s"] Sep 29 17:19:04 crc kubenswrapper[4592]: W0929 17:19:04.033801 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0275d99_00b1_4174_ab01_598af7ed19b7.slice/crio-6f14736cd63becc46e0155c530a50fb45903951e9690291a1abdf58be3208c21 WatchSource:0}: Error finding container 6f14736cd63becc46e0155c530a50fb45903951e9690291a1abdf58be3208c21: Status 404 returned error can't find the container with id 6f14736cd63becc46e0155c530a50fb45903951e9690291a1abdf58be3208c21 Sep 29 17:19:04 crc kubenswrapper[4592]: I0929 17:19:04.036931 4592 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 17:19:04 crc kubenswrapper[4592]: I0929 17:19:04.086357 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" event={"ID":"b0275d99-00b1-4174-ab01-598af7ed19b7","Type":"ContainerStarted","Data":"6f14736cd63becc46e0155c530a50fb45903951e9690291a1abdf58be3208c21"} Sep 29 17:19:05 crc kubenswrapper[4592]: I0929 17:19:05.105871 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" event={"ID":"b0275d99-00b1-4174-ab01-598af7ed19b7","Type":"ContainerStarted","Data":"b5cfac82b3d13c82da0367e57fe9da5afbb29c5848ba41c8dc143af830dc7104"} Sep 29 17:19:05 crc kubenswrapper[4592]: I0929 17:19:05.131946 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" podStartSLOduration=1.648423178 podStartE2EDuration="2.13192336s" podCreationTimestamp="2025-09-29 17:19:03 +0000 UTC" firstStartedPulling="2025-09-29 17:19:04.036610746 +0000 UTC m=+1674.184388427" lastFinishedPulling="2025-09-29 17:19:04.520110928 +0000 UTC m=+1674.667888609" observedRunningTime="2025-09-29 17:19:05.123530161 +0000 UTC m=+1675.271307842" watchObservedRunningTime="2025-09-29 17:19:05.13192336 +0000 UTC m=+1675.279701051" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.104189 4592 scope.go:117] "RemoveContainer" containerID="428d148676fb88bfa46aa41445590e6dbea748b4c5e1f83a1e3649890ebfbbe5" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.137447 4592 scope.go:117] "RemoveContainer" containerID="72ee675685325aaa10f59899167fceda6acc68b742ac3623c55287c15c8a1456" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.165561 4592 scope.go:117] "RemoveContainer" containerID="6ded64817cfe114aa58af543f1922a0811bd7f68a09f6190aac6c5885cb90b26" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.218756 4592 scope.go:117] "RemoveContainer" containerID="d9e83a97ef8961a128edc1281cc64d6f08aa209746f0adb5b448891ca16bb9a0" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.265486 4592 scope.go:117] "RemoveContainer" containerID="be0a469ad5d2a8f7d0440f53db36ba2a93cf54b4571daa9469addb3b926291e1" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.322014 4592 scope.go:117] "RemoveContainer" containerID="2d7a529c3d95d1440c07ec0f0f2914651b3cb75d784490aa13082f457665d33c" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.364105 4592 scope.go:117] "RemoveContainer" 
containerID="35a7d5f2265b9c8040df77c2f722e28e2e2c8ee2740e9cfdb9c14c4169dd5bb7" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.407323 4592 scope.go:117] "RemoveContainer" containerID="f5b6b298c1c03bfc8bfd076589e1bd4232cf6bc7608e520b6a2c613465c57455" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.432929 4592 scope.go:117] "RemoveContainer" containerID="5eaa2af99cedc681c75c46bc9c4c7ed7876f273bd73754754201446df74d811c" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.458709 4592 scope.go:117] "RemoveContainer" containerID="378aee20856f944412f2afaeba4c867d23cba3ace3badde6bc3a475bfd570173" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.479476 4592 scope.go:117] "RemoveContainer" containerID="016204b5c6bb67ac28917edf7322bdfa9a568f677d8d4f754e57d39edea938f8" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.513509 4592 scope.go:117] "RemoveContainer" containerID="865ad78aaaaf4f6d704ff809f1247aa134c6100d27d8cf665bb7f00e2be4af28" Sep 29 17:19:20 crc kubenswrapper[4592]: I0929 17:19:20.540530 4592 scope.go:117] "RemoveContainer" containerID="00f2a60b3610dbf30b9cd143ac1a4c4b1873aab80e916871ee7fa11bbc0b4897" Sep 29 17:19:21 crc kubenswrapper[4592]: I0929 17:19:21.031121 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-bd20-account-create-w6tjm"] Sep 29 17:19:21 crc kubenswrapper[4592]: I0929 17:19:21.037284 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-ccba-account-create-gth5x"] Sep 29 17:19:21 crc kubenswrapper[4592]: I0929 17:19:21.045316 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-bd20-account-create-w6tjm"] Sep 29 17:19:21 crc kubenswrapper[4592]: I0929 17:19:21.053916 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-ccba-account-create-gth5x"] Sep 29 17:19:21 crc kubenswrapper[4592]: I0929 17:19:21.062208 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-f76f-account-create-gzdk8"] Sep 29 17:19:21 crc kubenswrapper[4592]: I0929 17:19:21.071174 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-f76f-account-create-gzdk8"] Sep 29 17:19:21 crc kubenswrapper[4592]: I0929 17:19:21.196415 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14980c16-3c96-4bdc-a271-86ea80fe1ef3" path="/var/lib/kubelet/pods/14980c16-3c96-4bdc-a271-86ea80fe1ef3/volumes" Sep 29 17:19:21 crc kubenswrapper[4592]: I0929 17:19:21.197409 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a56497a-4fc4-4b0f-b831-8de37479949b" path="/var/lib/kubelet/pods/7a56497a-4fc4-4b0f-b831-8de37479949b/volumes" Sep 29 17:19:21 crc kubenswrapper[4592]: I0929 17:19:21.198172 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6264c6a-bee2-432e-b70c-1afad98e3db5" path="/var/lib/kubelet/pods/b6264c6a-bee2-432e-b70c-1afad98e3db5/volumes" Sep 29 17:19:25 crc kubenswrapper[4592]: I0929 17:19:25.029669 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-rplcj"] Sep 29 17:19:25 crc kubenswrapper[4592]: I0929 17:19:25.045963 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-rplcj"] Sep 29 17:19:25 crc kubenswrapper[4592]: I0929 17:19:25.199164 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94891c4d-d21e-419f-9ca9-ce48bef5b069" path="/var/lib/kubelet/pods/94891c4d-d21e-419f-9ca9-ce48bef5b069/volumes" Sep 29 17:19:30 crc kubenswrapper[4592]: I0929 17:19:30.883411 4592 
patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:19:30 crc kubenswrapper[4592]: I0929 17:19:30.883791 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:19:30 crc kubenswrapper[4592]: I0929 17:19:30.883855 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 17:19:30 crc kubenswrapper[4592]: I0929 17:19:30.885081 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 17:19:30 crc kubenswrapper[4592]: I0929 17:19:30.885233 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" gracePeriod=600 Sep 29 17:19:31 crc kubenswrapper[4592]: E0929 17:19:31.006288 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:19:31 crc kubenswrapper[4592]: I0929 17:19:31.389470 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" exitCode=0 Sep 29 17:19:31 crc kubenswrapper[4592]: I0929 17:19:31.389512 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5"} Sep 29 17:19:31 crc kubenswrapper[4592]: I0929 17:19:31.389547 4592 scope.go:117] "RemoveContainer" containerID="6ba93e7083930b491deeabf3f5e5e00bbbacfa31695ad5f283ea89667e717859" Sep 29 17:19:31 crc kubenswrapper[4592]: I0929 17:19:31.390271 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:19:31 crc kubenswrapper[4592]: E0929 17:19:31.390684 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:19:44 crc kubenswrapper[4592]: I0929 17:19:44.183029 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:19:44 crc kubenswrapper[4592]: E0929 17:19:44.183829 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:19:57 crc kubenswrapper[4592]: I0929 17:19:57.183524 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:19:57 crc kubenswrapper[4592]: E0929 17:19:57.184244 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:20:03 crc kubenswrapper[4592]: I0929 17:20:03.038517 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-zzm6d"] Sep 29 17:20:03 crc kubenswrapper[4592]: I0929 17:20:03.048255 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-zzm6d"] Sep 29 17:20:03 crc kubenswrapper[4592]: I0929 17:20:03.193382 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f923cac-6659-4bb9-9f5f-8278a4492b35" path="/var/lib/kubelet/pods/2f923cac-6659-4bb9-9f5f-8278a4492b35/volumes" Sep 29 17:20:08 crc kubenswrapper[4592]: I0929 17:20:08.183880 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:20:08 crc kubenswrapper[4592]: E0929 17:20:08.184675 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:20:17 crc kubenswrapper[4592]: I0929 17:20:17.034624 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-dvfxl"] Sep 29 17:20:17 crc kubenswrapper[4592]: I0929 17:20:17.045565 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-lgsdp"] Sep 29 17:20:17 crc kubenswrapper[4592]: I0929 17:20:17.058045 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-lgsdp"] Sep 29 17:20:17 crc kubenswrapper[4592]: I0929 17:20:17.065233 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-dvfxl"] Sep 29 17:20:17 crc kubenswrapper[4592]: I0929 17:20:17.207782 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e678434-33e7-4c70-adff-88140eb9d3af" 
path="/var/lib/kubelet/pods/9e678434-33e7-4c70-adff-88140eb9d3af/volumes" Sep 29 17:20:17 crc kubenswrapper[4592]: I0929 17:20:17.211260 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c518ef27-73ac-4f0b-a896-44284fd049c5" path="/var/lib/kubelet/pods/c518ef27-73ac-4f0b-a896-44284fd049c5/volumes" Sep 29 17:20:20 crc kubenswrapper[4592]: I0929 17:20:20.183459 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:20:20 crc kubenswrapper[4592]: E0929 17:20:20.184116 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:20:20 crc kubenswrapper[4592]: I0929 17:20:20.745581 4592 scope.go:117] "RemoveContainer" containerID="d660359deadfeacc0f11b2dd28fdcbb5cf8df3582de8dce24c059af2ce7f563d" Sep 29 17:20:20 crc kubenswrapper[4592]: I0929 17:20:20.780606 4592 scope.go:117] "RemoveContainer" containerID="3b4dd8b08baf69bafc9b66ad195e82abf44789bfbf01db4a305cb34a9c947086" Sep 29 17:20:20 crc kubenswrapper[4592]: I0929 17:20:20.829984 4592 scope.go:117] "RemoveContainer" containerID="6e8b2fc8aace48ca49c4f12eb56df304e012ad4cab16de0938de1bc9cf5dc76a" Sep 29 17:20:20 crc kubenswrapper[4592]: I0929 17:20:20.878822 4592 scope.go:117] "RemoveContainer" containerID="375c759c41213f449730f5c9f475ac2191282e96fb3fdf7b826e4eec34d50581" Sep 29 17:20:20 crc kubenswrapper[4592]: I0929 17:20:20.922532 4592 scope.go:117] "RemoveContainer" containerID="a4550ed229ae0c5d517b3b9646d8563cfe1886a767827988302647bc1ef6adf7" Sep 29 17:20:20 crc kubenswrapper[4592]: I0929 17:20:20.957935 4592 scope.go:117] "RemoveContainer" containerID="1b1d084b2300ab35e59270d0016acb470a7d058323d3727cf06ff278b1c6c197" Sep 29 17:20:21 crc kubenswrapper[4592]: I0929 17:20:21.001224 4592 scope.go:117] "RemoveContainer" containerID="92b54ce49fe66336b284dd3e48e80020a0d9c91bf10639a593aac95f9d26b310" Sep 29 17:20:28 crc kubenswrapper[4592]: I0929 17:20:28.038336 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-9gcq7"] Sep 29 17:20:28 crc kubenswrapper[4592]: I0929 17:20:28.059004 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-hgnnh"] Sep 29 17:20:28 crc kubenswrapper[4592]: I0929 17:20:28.072305 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-9gcq7"] Sep 29 17:20:28 crc kubenswrapper[4592]: I0929 17:20:28.083215 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-hgnnh"] Sep 29 17:20:29 crc kubenswrapper[4592]: I0929 17:20:29.193680 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4df9236d-2c26-4b89-acfc-d0de121eb93c" path="/var/lib/kubelet/pods/4df9236d-2c26-4b89-acfc-d0de121eb93c/volumes" Sep 29 17:20:29 crc kubenswrapper[4592]: I0929 17:20:29.194336 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f760ecfd-a454-4a77-89c3-0703ea63c515" path="/var/lib/kubelet/pods/f760ecfd-a454-4a77-89c3-0703ea63c515/volumes" Sep 29 17:20:32 crc kubenswrapper[4592]: I0929 17:20:32.182913 4592 scope.go:117] "RemoveContainer" 
containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:20:32 crc kubenswrapper[4592]: E0929 17:20:32.184467 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:20:35 crc kubenswrapper[4592]: I0929 17:20:35.066724 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-f2q9n"] Sep 29 17:20:35 crc kubenswrapper[4592]: I0929 17:20:35.077419 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-f2q9n"] Sep 29 17:20:35 crc kubenswrapper[4592]: I0929 17:20:35.194867 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c80e2b1-f512-432e-87fe-c0ea60e6a546" path="/var/lib/kubelet/pods/8c80e2b1-f512-432e-87fe-c0ea60e6a546/volumes" Sep 29 17:20:47 crc kubenswrapper[4592]: I0929 17:20:47.185616 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:20:47 crc kubenswrapper[4592]: E0929 17:20:47.186566 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:20:54 crc kubenswrapper[4592]: E0929 17:20:54.708945 4592 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0275d99_00b1_4174_ab01_598af7ed19b7.slice/crio-b5cfac82b3d13c82da0367e57fe9da5afbb29c5848ba41c8dc143af830dc7104.scope\": RecentStats: unable to find data in memory cache]" Sep 29 17:20:55 crc kubenswrapper[4592]: I0929 17:20:55.145462 4592 generic.go:334] "Generic (PLEG): container finished" podID="b0275d99-00b1-4174-ab01-598af7ed19b7" containerID="b5cfac82b3d13c82da0367e57fe9da5afbb29c5848ba41c8dc143af830dc7104" exitCode=0 Sep 29 17:20:55 crc kubenswrapper[4592]: I0929 17:20:55.145512 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" event={"ID":"b0275d99-00b1-4174-ab01-598af7ed19b7","Type":"ContainerDied","Data":"b5cfac82b3d13c82da0367e57fe9da5afbb29c5848ba41c8dc143af830dc7104"} Sep 29 17:20:56 crc kubenswrapper[4592]: I0929 17:20:56.546098 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:20:56 crc kubenswrapper[4592]: I0929 17:20:56.705249 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0275d99-00b1-4174-ab01-598af7ed19b7-inventory\") pod \"b0275d99-00b1-4174-ab01-598af7ed19b7\" (UID: \"b0275d99-00b1-4174-ab01-598af7ed19b7\") " Sep 29 17:20:56 crc kubenswrapper[4592]: I0929 17:20:56.705474 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0275d99-00b1-4174-ab01-598af7ed19b7-ssh-key\") pod \"b0275d99-00b1-4174-ab01-598af7ed19b7\" (UID: \"b0275d99-00b1-4174-ab01-598af7ed19b7\") " Sep 29 17:20:56 crc kubenswrapper[4592]: I0929 17:20:56.705528 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5r6z\" (UniqueName: \"kubernetes.io/projected/b0275d99-00b1-4174-ab01-598af7ed19b7-kube-api-access-g5r6z\") pod \"b0275d99-00b1-4174-ab01-598af7ed19b7\" (UID: \"b0275d99-00b1-4174-ab01-598af7ed19b7\") " Sep 29 17:20:56 crc kubenswrapper[4592]: I0929 17:20:56.710348 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0275d99-00b1-4174-ab01-598af7ed19b7-kube-api-access-g5r6z" (OuterVolumeSpecName: "kube-api-access-g5r6z") pod "b0275d99-00b1-4174-ab01-598af7ed19b7" (UID: "b0275d99-00b1-4174-ab01-598af7ed19b7"). InnerVolumeSpecName "kube-api-access-g5r6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:20:56 crc kubenswrapper[4592]: I0929 17:20:56.737351 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0275d99-00b1-4174-ab01-598af7ed19b7-inventory" (OuterVolumeSpecName: "inventory") pod "b0275d99-00b1-4174-ab01-598af7ed19b7" (UID: "b0275d99-00b1-4174-ab01-598af7ed19b7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:20:56 crc kubenswrapper[4592]: I0929 17:20:56.743704 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0275d99-00b1-4174-ab01-598af7ed19b7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b0275d99-00b1-4174-ab01-598af7ed19b7" (UID: "b0275d99-00b1-4174-ab01-598af7ed19b7"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:20:56 crc kubenswrapper[4592]: I0929 17:20:56.807250 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b0275d99-00b1-4174-ab01-598af7ed19b7-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:20:56 crc kubenswrapper[4592]: I0929 17:20:56.807470 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5r6z\" (UniqueName: \"kubernetes.io/projected/b0275d99-00b1-4174-ab01-598af7ed19b7-kube-api-access-g5r6z\") on node \"crc\" DevicePath \"\"" Sep 29 17:20:56 crc kubenswrapper[4592]: I0929 17:20:56.807531 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b0275d99-00b1-4174-ab01-598af7ed19b7-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.171039 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" event={"ID":"b0275d99-00b1-4174-ab01-598af7ed19b7","Type":"ContainerDied","Data":"6f14736cd63becc46e0155c530a50fb45903951e9690291a1abdf58be3208c21"} Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.171082 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f14736cd63becc46e0155c530a50fb45903951e9690291a1abdf58be3208c21" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.171175 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.298100 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t"] Sep 29 17:20:57 crc kubenswrapper[4592]: E0929 17:20:57.298549 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0275d99-00b1-4174-ab01-598af7ed19b7" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.298572 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0275d99-00b1-4174-ab01-598af7ed19b7" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.298757 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0275d99-00b1-4174-ab01-598af7ed19b7" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.299389 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.301871 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.301960 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.302079 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.302139 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.354336 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t"] Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.424550 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cqmz\" (UniqueName: \"kubernetes.io/projected/5f2c16e8-c860-42a9-9888-63e22d9d57b2-kube-api-access-5cqmz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t\" (UID: \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.424634 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f2c16e8-c860-42a9-9888-63e22d9d57b2-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t\" (UID: \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.424699 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f2c16e8-c860-42a9-9888-63e22d9d57b2-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t\" (UID: \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.525866 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f2c16e8-c860-42a9-9888-63e22d9d57b2-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t\" (UID: \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.525991 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cqmz\" (UniqueName: \"kubernetes.io/projected/5f2c16e8-c860-42a9-9888-63e22d9d57b2-kube-api-access-5cqmz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t\" (UID: \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.526057 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f2c16e8-c860-42a9-9888-63e22d9d57b2-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t\" (UID: \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.530269 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f2c16e8-c860-42a9-9888-63e22d9d57b2-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t\" (UID: \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.531108 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f2c16e8-c860-42a9-9888-63e22d9d57b2-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t\" (UID: \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.559888 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cqmz\" (UniqueName: \"kubernetes.io/projected/5f2c16e8-c860-42a9-9888-63e22d9d57b2-kube-api-access-5cqmz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t\" (UID: \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:20:57 crc kubenswrapper[4592]: I0929 17:20:57.615910 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:20:58 crc kubenswrapper[4592]: W0929 17:20:58.158396 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f2c16e8_c860_42a9_9888_63e22d9d57b2.slice/crio-72dd0b902d0d4964b01f4a65712cafa7e6219ae737fe50e389d579c62e01eee0 WatchSource:0}: Error finding container 72dd0b902d0d4964b01f4a65712cafa7e6219ae737fe50e389d579c62e01eee0: Status 404 returned error can't find the container with id 72dd0b902d0d4964b01f4a65712cafa7e6219ae737fe50e389d579c62e01eee0 Sep 29 17:20:58 crc kubenswrapper[4592]: I0929 17:20:58.160711 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t"] Sep 29 17:20:58 crc kubenswrapper[4592]: I0929 17:20:58.181649 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" event={"ID":"5f2c16e8-c860-42a9-9888-63e22d9d57b2","Type":"ContainerStarted","Data":"72dd0b902d0d4964b01f4a65712cafa7e6219ae737fe50e389d579c62e01eee0"} Sep 29 17:20:59 crc kubenswrapper[4592]: I0929 17:20:59.195297 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" event={"ID":"5f2c16e8-c860-42a9-9888-63e22d9d57b2","Type":"ContainerStarted","Data":"eb34c27d87d5ea300debe85bcd09d0f3e049e33cee4e9646abde964e3bad37ad"} Sep 29 17:20:59 crc kubenswrapper[4592]: I0929 17:20:59.216173 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" podStartSLOduration=1.795124953 podStartE2EDuration="2.216137167s" podCreationTimestamp="2025-09-29 17:20:57 +0000 UTC" firstStartedPulling="2025-09-29 17:20:58.159918361 +0000 UTC 
m=+1788.307696042" lastFinishedPulling="2025-09-29 17:20:58.580930575 +0000 UTC m=+1788.728708256" observedRunningTime="2025-09-29 17:20:59.207645753 +0000 UTC m=+1789.355423434" watchObservedRunningTime="2025-09-29 17:20:59.216137167 +0000 UTC m=+1789.363914848" Sep 29 17:21:00 crc kubenswrapper[4592]: I0929 17:21:00.185537 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:21:00 crc kubenswrapper[4592]: E0929 17:21:00.185821 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:21:06 crc kubenswrapper[4592]: I0929 17:21:06.045221 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-g8ghq"] Sep 29 17:21:06 crc kubenswrapper[4592]: I0929 17:21:06.053812 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-x2l72"] Sep 29 17:21:06 crc kubenswrapper[4592]: I0929 17:21:06.063627 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-g8ghq"] Sep 29 17:21:06 crc kubenswrapper[4592]: I0929 17:21:06.072630 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-x2l72"] Sep 29 17:21:07 crc kubenswrapper[4592]: I0929 17:21:07.027403 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-4c8jd"] Sep 29 17:21:07 crc kubenswrapper[4592]: I0929 17:21:07.045809 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-4c8jd"] Sep 29 17:21:07 crc kubenswrapper[4592]: I0929 17:21:07.197652 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00235364-b815-42c6-80e2-f876dd991541" path="/var/lib/kubelet/pods/00235364-b815-42c6-80e2-f876dd991541/volumes" Sep 29 17:21:07 crc kubenswrapper[4592]: I0929 17:21:07.198406 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab0d70af-d7f7-4c70-813f-e26252c411a8" path="/var/lib/kubelet/pods/ab0d70af-d7f7-4c70-813f-e26252c411a8/volumes" Sep 29 17:21:07 crc kubenswrapper[4592]: I0929 17:21:07.199023 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b54f12ab-68c8-47eb-9bc4-93adee895d06" path="/var/lib/kubelet/pods/b54f12ab-68c8-47eb-9bc4-93adee895d06/volumes" Sep 29 17:21:11 crc kubenswrapper[4592]: I0929 17:21:11.189316 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:21:11 crc kubenswrapper[4592]: E0929 17:21:11.189895 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:21:20 crc kubenswrapper[4592]: I0929 17:21:20.035905 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-94eb-account-create-wfwp5"] Sep 29 17:21:20 crc kubenswrapper[4592]: I0929 
17:21:20.046007 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-5abd-account-create-sqj9l"] Sep 29 17:21:20 crc kubenswrapper[4592]: I0929 17:21:20.056443 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-94eb-account-create-wfwp5"] Sep 29 17:21:20 crc kubenswrapper[4592]: I0929 17:21:20.065411 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-5abd-account-create-sqj9l"] Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.032761 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-bee2-account-create-rrx9l"] Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.041281 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-bee2-account-create-rrx9l"] Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.119175 4592 scope.go:117] "RemoveContainer" containerID="b1e1f5a4cd45518c2d74384ca679f452e915a26f2edbfcb9590d368211f303aa" Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.145198 4592 scope.go:117] "RemoveContainer" containerID="3d153efe2d442d5cf9d5c2ea7ea15199eb598344fac9ac084fc12f46e2e1847a" Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.204579 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0167bdf3-7113-4993-b294-d33073462e4d" path="/var/lib/kubelet/pods/0167bdf3-7113-4993-b294-d33073462e4d/volumes" Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.205746 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09df60bf-ab19-417c-8910-c666047d0ec9" path="/var/lib/kubelet/pods/09df60bf-ab19-417c-8910-c666047d0ec9/volumes" Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.206534 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aec6f146-7664-4f01-ab13-7f4d9bad57f2" path="/var/lib/kubelet/pods/aec6f146-7664-4f01-ab13-7f4d9bad57f2/volumes" Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.211397 4592 scope.go:117] "RemoveContainer" containerID="43f4f54b807fc9b253229dfc0397bb3a0f82d65b1362e4ca1de24653bc81f8cc" Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.241476 4592 scope.go:117] "RemoveContainer" containerID="8fd399cdc3e75e50cc234af8ce51516ffe384aed2b993c5d201c19bb2b45d1da" Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.305557 4592 scope.go:117] "RemoveContainer" containerID="8821f7c62ef22176df069ec61787a80bfbf6ac718ca197b2d84593dd465b7d66" Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.341738 4592 scope.go:117] "RemoveContainer" containerID="9405561b29682530ed59529f35893507c857d46203a082f0e6cbcc2d1efb8d68" Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.385932 4592 scope.go:117] "RemoveContainer" containerID="924a1646f5369fa836964d520b6dc57969883c479ce67698c3bb777ade64117a" Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.419467 4592 scope.go:117] "RemoveContainer" containerID="583162e92e8f63d66f73dbfd38668cc7bf9edc849791d912cc186ece00a11d98" Sep 29 17:21:21 crc kubenswrapper[4592]: I0929 17:21:21.436617 4592 scope.go:117] "RemoveContainer" containerID="70eb1f280be325225e6184281858027bdcf55b861e6e8eddcd1312a382c946a4" Sep 29 17:21:22 crc kubenswrapper[4592]: I0929 17:21:22.184620 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:21:22 crc kubenswrapper[4592]: E0929 17:21:22.184886 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:21:33 crc kubenswrapper[4592]: I0929 17:21:33.183392 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:21:33 crc kubenswrapper[4592]: E0929 17:21:33.184265 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:21:48 crc kubenswrapper[4592]: I0929 17:21:48.183836 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:21:48 crc kubenswrapper[4592]: E0929 17:21:48.184606 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:22:01 crc kubenswrapper[4592]: I0929 17:22:01.192498 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:22:01 crc kubenswrapper[4592]: E0929 17:22:01.193303 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:22:13 crc kubenswrapper[4592]: I0929 17:22:13.868820 4592 generic.go:334] "Generic (PLEG): container finished" podID="5f2c16e8-c860-42a9-9888-63e22d9d57b2" containerID="eb34c27d87d5ea300debe85bcd09d0f3e049e33cee4e9646abde964e3bad37ad" exitCode=0 Sep 29 17:22:13 crc kubenswrapper[4592]: I0929 17:22:13.868895 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" event={"ID":"5f2c16e8-c860-42a9-9888-63e22d9d57b2","Type":"ContainerDied","Data":"eb34c27d87d5ea300debe85bcd09d0f3e049e33cee4e9646abde964e3bad37ad"} Sep 29 17:22:14 crc kubenswrapper[4592]: I0929 17:22:14.047021 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hptl4"] Sep 29 17:22:14 crc kubenswrapper[4592]: I0929 17:22:14.057078 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-hptl4"] Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.184394 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:22:15 crc kubenswrapper[4592]: E0929 17:22:15.184858 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.200300 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="617a27c8-02ce-43f0-a41d-230af300cafe" path="/var/lib/kubelet/pods/617a27c8-02ce-43f0-a41d-230af300cafe/volumes" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.273396 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.310805 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cqmz\" (UniqueName: \"kubernetes.io/projected/5f2c16e8-c860-42a9-9888-63e22d9d57b2-kube-api-access-5cqmz\") pod \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\" (UID: \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\") " Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.310848 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f2c16e8-c860-42a9-9888-63e22d9d57b2-ssh-key\") pod \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\" (UID: \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\") " Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.310946 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f2c16e8-c860-42a9-9888-63e22d9d57b2-inventory\") pod \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\" (UID: \"5f2c16e8-c860-42a9-9888-63e22d9d57b2\") " Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.324839 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f2c16e8-c860-42a9-9888-63e22d9d57b2-kube-api-access-5cqmz" (OuterVolumeSpecName: "kube-api-access-5cqmz") pod "5f2c16e8-c860-42a9-9888-63e22d9d57b2" (UID: "5f2c16e8-c860-42a9-9888-63e22d9d57b2"). InnerVolumeSpecName "kube-api-access-5cqmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.349940 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f2c16e8-c860-42a9-9888-63e22d9d57b2-inventory" (OuterVolumeSpecName: "inventory") pod "5f2c16e8-c860-42a9-9888-63e22d9d57b2" (UID: "5f2c16e8-c860-42a9-9888-63e22d9d57b2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.350482 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f2c16e8-c860-42a9-9888-63e22d9d57b2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5f2c16e8-c860-42a9-9888-63e22d9d57b2" (UID: "5f2c16e8-c860-42a9-9888-63e22d9d57b2"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.413106 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f2c16e8-c860-42a9-9888-63e22d9d57b2-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.413141 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cqmz\" (UniqueName: \"kubernetes.io/projected/5f2c16e8-c860-42a9-9888-63e22d9d57b2-kube-api-access-5cqmz\") on node \"crc\" DevicePath \"\"" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.413166 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f2c16e8-c860-42a9-9888-63e22d9d57b2-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.910667 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" event={"ID":"5f2c16e8-c860-42a9-9888-63e22d9d57b2","Type":"ContainerDied","Data":"72dd0b902d0d4964b01f4a65712cafa7e6219ae737fe50e389d579c62e01eee0"} Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.910716 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72dd0b902d0d4964b01f4a65712cafa7e6219ae737fe50e389d579c62e01eee0" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.910769 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.992404 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9"] Sep 29 17:22:15 crc kubenswrapper[4592]: E0929 17:22:15.992904 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f2c16e8-c860-42a9-9888-63e22d9d57b2" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.992930 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f2c16e8-c860-42a9-9888-63e22d9d57b2" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.993187 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f2c16e8-c860-42a9-9888-63e22d9d57b2" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.993993 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.998961 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.999056 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.999255 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:22:15 crc kubenswrapper[4592]: I0929 17:22:15.999407 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.025809 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9"] Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.125915 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0641d7e4-c868-48bd-948d-186401c6f3c7-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9\" (UID: \"0641d7e4-c868-48bd-948d-186401c6f3c7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.125991 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0641d7e4-c868-48bd-948d-186401c6f3c7-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9\" (UID: \"0641d7e4-c868-48bd-948d-186401c6f3c7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.126061 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sdk4\" (UniqueName: \"kubernetes.io/projected/0641d7e4-c868-48bd-948d-186401c6f3c7-kube-api-access-2sdk4\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9\" (UID: \"0641d7e4-c868-48bd-948d-186401c6f3c7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.227598 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0641d7e4-c868-48bd-948d-186401c6f3c7-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9\" (UID: \"0641d7e4-c868-48bd-948d-186401c6f3c7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.227646 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0641d7e4-c868-48bd-948d-186401c6f3c7-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9\" (UID: \"0641d7e4-c868-48bd-948d-186401c6f3c7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.227690 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sdk4\" (UniqueName: \"kubernetes.io/projected/0641d7e4-c868-48bd-948d-186401c6f3c7-kube-api-access-2sdk4\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9\" (UID: \"0641d7e4-c868-48bd-948d-186401c6f3c7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.238397 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0641d7e4-c868-48bd-948d-186401c6f3c7-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9\" (UID: \"0641d7e4-c868-48bd-948d-186401c6f3c7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.238813 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0641d7e4-c868-48bd-948d-186401c6f3c7-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9\" (UID: \"0641d7e4-c868-48bd-948d-186401c6f3c7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.249744 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sdk4\" (UniqueName: \"kubernetes.io/projected/0641d7e4-c868-48bd-948d-186401c6f3c7-kube-api-access-2sdk4\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9\" (UID: \"0641d7e4-c868-48bd-948d-186401c6f3c7\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.322700 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.900619 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9"] Sep 29 17:22:16 crc kubenswrapper[4592]: I0929 17:22:16.923949 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" event={"ID":"0641d7e4-c868-48bd-948d-186401c6f3c7","Type":"ContainerStarted","Data":"21d2c1dd85014b19ba76aa89399c66aed6ce8f68ad5cfbdd0cd034ae923b3705"} Sep 29 17:22:18 crc kubenswrapper[4592]: I0929 17:22:18.945644 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" event={"ID":"0641d7e4-c868-48bd-948d-186401c6f3c7","Type":"ContainerStarted","Data":"0c8a1589623f6e74eb3191636a024f14a94a00f689c34360875b10505c1c0380"} Sep 29 17:22:18 crc kubenswrapper[4592]: I0929 17:22:18.969548 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" podStartSLOduration=2.876522716 podStartE2EDuration="3.96952935s" podCreationTimestamp="2025-09-29 17:22:15 +0000 UTC" firstStartedPulling="2025-09-29 17:22:16.911392649 +0000 UTC m=+1867.059170330" lastFinishedPulling="2025-09-29 17:22:18.004399283 +0000 UTC m=+1868.152176964" observedRunningTime="2025-09-29 17:22:18.966676572 +0000 UTC m=+1869.114454263" watchObservedRunningTime="2025-09-29 17:22:18.96952935 +0000 UTC m=+1869.117307041" Sep 29 17:22:21 crc kubenswrapper[4592]: I0929 17:22:21.648098 4592 scope.go:117] "RemoveContainer" containerID="9f90897c66c8e5737be4f073a265520a2a9ccb80861b5d2042a54f8955e1be71" Sep 29 17:22:23 crc kubenswrapper[4592]: I0929 17:22:23.985696 4592 generic.go:334] "Generic (PLEG): container finished" 
podID="0641d7e4-c868-48bd-948d-186401c6f3c7" containerID="0c8a1589623f6e74eb3191636a024f14a94a00f689c34360875b10505c1c0380" exitCode=0 Sep 29 17:22:23 crc kubenswrapper[4592]: I0929 17:22:23.985787 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" event={"ID":"0641d7e4-c868-48bd-948d-186401c6f3c7","Type":"ContainerDied","Data":"0c8a1589623f6e74eb3191636a024f14a94a00f689c34360875b10505c1c0380"} Sep 29 17:22:25 crc kubenswrapper[4592]: I0929 17:22:25.447304 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:25 crc kubenswrapper[4592]: I0929 17:22:25.601737 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0641d7e4-c868-48bd-948d-186401c6f3c7-ssh-key\") pod \"0641d7e4-c868-48bd-948d-186401c6f3c7\" (UID: \"0641d7e4-c868-48bd-948d-186401c6f3c7\") " Sep 29 17:22:25 crc kubenswrapper[4592]: I0929 17:22:25.601990 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sdk4\" (UniqueName: \"kubernetes.io/projected/0641d7e4-c868-48bd-948d-186401c6f3c7-kube-api-access-2sdk4\") pod \"0641d7e4-c868-48bd-948d-186401c6f3c7\" (UID: \"0641d7e4-c868-48bd-948d-186401c6f3c7\") " Sep 29 17:22:25 crc kubenswrapper[4592]: I0929 17:22:25.602606 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0641d7e4-c868-48bd-948d-186401c6f3c7-inventory\") pod \"0641d7e4-c868-48bd-948d-186401c6f3c7\" (UID: \"0641d7e4-c868-48bd-948d-186401c6f3c7\") " Sep 29 17:22:25 crc kubenswrapper[4592]: I0929 17:22:25.607242 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0641d7e4-c868-48bd-948d-186401c6f3c7-kube-api-access-2sdk4" (OuterVolumeSpecName: "kube-api-access-2sdk4") pod "0641d7e4-c868-48bd-948d-186401c6f3c7" (UID: "0641d7e4-c868-48bd-948d-186401c6f3c7"). InnerVolumeSpecName "kube-api-access-2sdk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:22:25 crc kubenswrapper[4592]: I0929 17:22:25.629468 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0641d7e4-c868-48bd-948d-186401c6f3c7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0641d7e4-c868-48bd-948d-186401c6f3c7" (UID: "0641d7e4-c868-48bd-948d-186401c6f3c7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:22:25 crc kubenswrapper[4592]: I0929 17:22:25.632774 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0641d7e4-c868-48bd-948d-186401c6f3c7-inventory" (OuterVolumeSpecName: "inventory") pod "0641d7e4-c868-48bd-948d-186401c6f3c7" (UID: "0641d7e4-c868-48bd-948d-186401c6f3c7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:22:25 crc kubenswrapper[4592]: I0929 17:22:25.704260 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sdk4\" (UniqueName: \"kubernetes.io/projected/0641d7e4-c868-48bd-948d-186401c6f3c7-kube-api-access-2sdk4\") on node \"crc\" DevicePath \"\"" Sep 29 17:22:25 crc kubenswrapper[4592]: I0929 17:22:25.704296 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0641d7e4-c868-48bd-948d-186401c6f3c7-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:22:25 crc kubenswrapper[4592]: I0929 17:22:25.704309 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0641d7e4-c868-48bd-948d-186401c6f3c7-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.004379 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" event={"ID":"0641d7e4-c868-48bd-948d-186401c6f3c7","Type":"ContainerDied","Data":"21d2c1dd85014b19ba76aa89399c66aed6ce8f68ad5cfbdd0cd034ae923b3705"} Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.004411 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.004432 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21d2c1dd85014b19ba76aa89399c66aed6ce8f68ad5cfbdd0cd034ae923b3705" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.105468 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn"] Sep 29 17:22:26 crc kubenswrapper[4592]: E0929 17:22:26.105883 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0641d7e4-c868-48bd-948d-186401c6f3c7" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.105899 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="0641d7e4-c868-48bd-948d-186401c6f3c7" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.106082 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="0641d7e4-c868-48bd-948d-186401c6f3c7" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.106804 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.110952 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.111736 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.112169 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.113519 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.119665 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn"] Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.211684 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bwt5\" (UniqueName: \"kubernetes.io/projected/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-kube-api-access-8bwt5\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-79dcn\" (UID: \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.211734 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-79dcn\" (UID: \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.211769 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-79dcn\" (UID: \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.313849 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-79dcn\" (UID: \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.315026 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bwt5\" (UniqueName: \"kubernetes.io/projected/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-kube-api-access-8bwt5\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-79dcn\" (UID: \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.315089 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-79dcn\" (UID: 
\"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.321278 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-79dcn\" (UID: \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.329815 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-79dcn\" (UID: \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.345768 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bwt5\" (UniqueName: \"kubernetes.io/projected/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-kube-api-access-8bwt5\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-79dcn\" (UID: \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.434515 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:22:26 crc kubenswrapper[4592]: I0929 17:22:26.971942 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn"] Sep 29 17:22:27 crc kubenswrapper[4592]: I0929 17:22:27.013589 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" event={"ID":"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6","Type":"ContainerStarted","Data":"eadfdd301c9a09fb1378c171744b1e2b1ee364ee6db3716b3e554fe4d31fe37c"} Sep 29 17:22:27 crc kubenswrapper[4592]: I0929 17:22:27.183049 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:22:27 crc kubenswrapper[4592]: E0929 17:22:27.183451 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:22:28 crc kubenswrapper[4592]: I0929 17:22:28.022727 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" event={"ID":"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6","Type":"ContainerStarted","Data":"39f493c65f91fd3defe2652041697395b4e0bf84874b4b0f243093800e5aaf72"} Sep 29 17:22:28 crc kubenswrapper[4592]: I0929 17:22:28.042805 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" podStartSLOduration=1.566266047 podStartE2EDuration="2.042783015s" podCreationTimestamp="2025-09-29 17:22:26 +0000 UTC" firstStartedPulling="2025-09-29 17:22:26.978309997 +0000 UTC m=+1877.126087678" 
lastFinishedPulling="2025-09-29 17:22:27.454826955 +0000 UTC m=+1877.602604646" observedRunningTime="2025-09-29 17:22:28.036612087 +0000 UTC m=+1878.184389778" watchObservedRunningTime="2025-09-29 17:22:28.042783015 +0000 UTC m=+1878.190560696" Sep 29 17:22:29 crc kubenswrapper[4592]: I0929 17:22:29.762942 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lwjdt"] Sep 29 17:22:29 crc kubenswrapper[4592]: I0929 17:22:29.765091 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:29 crc kubenswrapper[4592]: I0929 17:22:29.783795 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lwjdt"] Sep 29 17:22:29 crc kubenswrapper[4592]: I0929 17:22:29.806827 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84e595de-b3fa-404c-aa7b-aaec9071b083-utilities\") pod \"certified-operators-lwjdt\" (UID: \"84e595de-b3fa-404c-aa7b-aaec9071b083\") " pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:29 crc kubenswrapper[4592]: I0929 17:22:29.806905 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84e595de-b3fa-404c-aa7b-aaec9071b083-catalog-content\") pod \"certified-operators-lwjdt\" (UID: \"84e595de-b3fa-404c-aa7b-aaec9071b083\") " pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:29 crc kubenswrapper[4592]: I0929 17:22:29.807091 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28ppn\" (UniqueName: \"kubernetes.io/projected/84e595de-b3fa-404c-aa7b-aaec9071b083-kube-api-access-28ppn\") pod \"certified-operators-lwjdt\" (UID: \"84e595de-b3fa-404c-aa7b-aaec9071b083\") " pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:29 crc kubenswrapper[4592]: I0929 17:22:29.908293 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28ppn\" (UniqueName: \"kubernetes.io/projected/84e595de-b3fa-404c-aa7b-aaec9071b083-kube-api-access-28ppn\") pod \"certified-operators-lwjdt\" (UID: \"84e595de-b3fa-404c-aa7b-aaec9071b083\") " pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:29 crc kubenswrapper[4592]: I0929 17:22:29.908387 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84e595de-b3fa-404c-aa7b-aaec9071b083-utilities\") pod \"certified-operators-lwjdt\" (UID: \"84e595de-b3fa-404c-aa7b-aaec9071b083\") " pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:29 crc kubenswrapper[4592]: I0929 17:22:29.908429 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84e595de-b3fa-404c-aa7b-aaec9071b083-catalog-content\") pod \"certified-operators-lwjdt\" (UID: \"84e595de-b3fa-404c-aa7b-aaec9071b083\") " pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:29 crc kubenswrapper[4592]: I0929 17:22:29.909086 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84e595de-b3fa-404c-aa7b-aaec9071b083-utilities\") pod \"certified-operators-lwjdt\" (UID: \"84e595de-b3fa-404c-aa7b-aaec9071b083\") " 
pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:29 crc kubenswrapper[4592]: I0929 17:22:29.909089 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84e595de-b3fa-404c-aa7b-aaec9071b083-catalog-content\") pod \"certified-operators-lwjdt\" (UID: \"84e595de-b3fa-404c-aa7b-aaec9071b083\") " pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:29 crc kubenswrapper[4592]: I0929 17:22:29.935102 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28ppn\" (UniqueName: \"kubernetes.io/projected/84e595de-b3fa-404c-aa7b-aaec9071b083-kube-api-access-28ppn\") pod \"certified-operators-lwjdt\" (UID: \"84e595de-b3fa-404c-aa7b-aaec9071b083\") " pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:30 crc kubenswrapper[4592]: I0929 17:22:30.087231 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:30 crc kubenswrapper[4592]: W0929 17:22:30.558531 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84e595de_b3fa_404c_aa7b_aaec9071b083.slice/crio-d999ddc93914397e36dc5b3dca3ec0cf572b545c793d012c9ac789d51f823a88 WatchSource:0}: Error finding container d999ddc93914397e36dc5b3dca3ec0cf572b545c793d012c9ac789d51f823a88: Status 404 returned error can't find the container with id d999ddc93914397e36dc5b3dca3ec0cf572b545c793d012c9ac789d51f823a88 Sep 29 17:22:30 crc kubenswrapper[4592]: I0929 17:22:30.559927 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lwjdt"] Sep 29 17:22:31 crc kubenswrapper[4592]: I0929 17:22:31.048635 4592 generic.go:334] "Generic (PLEG): container finished" podID="84e595de-b3fa-404c-aa7b-aaec9071b083" containerID="e0a7ddb7588893a4d8f2731e179f22a003f60a464086de9179b1ffc2c1a7ca84" exitCode=0 Sep 29 17:22:31 crc kubenswrapper[4592]: I0929 17:22:31.048733 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lwjdt" event={"ID":"84e595de-b3fa-404c-aa7b-aaec9071b083","Type":"ContainerDied","Data":"e0a7ddb7588893a4d8f2731e179f22a003f60a464086de9179b1ffc2c1a7ca84"} Sep 29 17:22:31 crc kubenswrapper[4592]: I0929 17:22:31.048970 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lwjdt" event={"ID":"84e595de-b3fa-404c-aa7b-aaec9071b083","Type":"ContainerStarted","Data":"d999ddc93914397e36dc5b3dca3ec0cf572b545c793d012c9ac789d51f823a88"} Sep 29 17:22:32 crc kubenswrapper[4592]: I0929 17:22:32.060175 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lwjdt" event={"ID":"84e595de-b3fa-404c-aa7b-aaec9071b083","Type":"ContainerStarted","Data":"5c8b38b6dc05b85e20a9332a61ed133c6c3bc3e69ca48275f689122f5e04b900"} Sep 29 17:22:34 crc kubenswrapper[4592]: I0929 17:22:34.083230 4592 generic.go:334] "Generic (PLEG): container finished" podID="84e595de-b3fa-404c-aa7b-aaec9071b083" containerID="5c8b38b6dc05b85e20a9332a61ed133c6c3bc3e69ca48275f689122f5e04b900" exitCode=0 Sep 29 17:22:34 crc kubenswrapper[4592]: I0929 17:22:34.083272 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lwjdt" event={"ID":"84e595de-b3fa-404c-aa7b-aaec9071b083","Type":"ContainerDied","Data":"5c8b38b6dc05b85e20a9332a61ed133c6c3bc3e69ca48275f689122f5e04b900"} Sep 
29 17:22:35 crc kubenswrapper[4592]: I0929 17:22:35.093683 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lwjdt" event={"ID":"84e595de-b3fa-404c-aa7b-aaec9071b083","Type":"ContainerStarted","Data":"edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7"} Sep 29 17:22:38 crc kubenswrapper[4592]: I0929 17:22:38.037638 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lwjdt" podStartSLOduration=5.597524416 podStartE2EDuration="9.037616734s" podCreationTimestamp="2025-09-29 17:22:29 +0000 UTC" firstStartedPulling="2025-09-29 17:22:31.050614883 +0000 UTC m=+1881.198392564" lastFinishedPulling="2025-09-29 17:22:34.490707201 +0000 UTC m=+1884.638484882" observedRunningTime="2025-09-29 17:22:35.126107256 +0000 UTC m=+1885.273884947" watchObservedRunningTime="2025-09-29 17:22:38.037616734 +0000 UTC m=+1888.185394425" Sep 29 17:22:38 crc kubenswrapper[4592]: I0929 17:22:38.043392 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-pcfkx"] Sep 29 17:22:38 crc kubenswrapper[4592]: I0929 17:22:38.051885 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-pcfkx"] Sep 29 17:22:38 crc kubenswrapper[4592]: I0929 17:22:38.183541 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:22:38 crc kubenswrapper[4592]: E0929 17:22:38.183826 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:22:39 crc kubenswrapper[4592]: I0929 17:22:39.192782 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b018763-574c-4186-9191-3342af9acbf3" path="/var/lib/kubelet/pods/9b018763-574c-4186-9191-3342af9acbf3/volumes" Sep 29 17:22:40 crc kubenswrapper[4592]: I0929 17:22:40.032597 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-gn5bh"] Sep 29 17:22:40 crc kubenswrapper[4592]: I0929 17:22:40.042375 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-gn5bh"] Sep 29 17:22:40 crc kubenswrapper[4592]: I0929 17:22:40.087997 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:40 crc kubenswrapper[4592]: I0929 17:22:40.088254 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:41 crc kubenswrapper[4592]: I0929 17:22:41.132371 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-lwjdt" podUID="84e595de-b3fa-404c-aa7b-aaec9071b083" containerName="registry-server" probeResult="failure" output=< Sep 29 17:22:41 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s Sep 29 17:22:41 crc kubenswrapper[4592]: > Sep 29 17:22:41 crc kubenswrapper[4592]: I0929 17:22:41.222288 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="feab94f4-b3ec-465b-8d59-22643f853dc8" 
path="/var/lib/kubelet/pods/feab94f4-b3ec-465b-8d59-22643f853dc8/volumes" Sep 29 17:22:49 crc kubenswrapper[4592]: I0929 17:22:49.182993 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:22:49 crc kubenswrapper[4592]: E0929 17:22:49.183683 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:22:50 crc kubenswrapper[4592]: I0929 17:22:50.176655 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:50 crc kubenswrapper[4592]: I0929 17:22:50.237834 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:50 crc kubenswrapper[4592]: I0929 17:22:50.431504 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lwjdt"] Sep 29 17:22:51 crc kubenswrapper[4592]: I0929 17:22:51.238397 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lwjdt" podUID="84e595de-b3fa-404c-aa7b-aaec9071b083" containerName="registry-server" containerID="cri-o://edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7" gracePeriod=2 Sep 29 17:22:51 crc kubenswrapper[4592]: I0929 17:22:51.743057 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:51 crc kubenswrapper[4592]: I0929 17:22:51.934935 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84e595de-b3fa-404c-aa7b-aaec9071b083-catalog-content\") pod \"84e595de-b3fa-404c-aa7b-aaec9071b083\" (UID: \"84e595de-b3fa-404c-aa7b-aaec9071b083\") " Sep 29 17:22:51 crc kubenswrapper[4592]: I0929 17:22:51.934982 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84e595de-b3fa-404c-aa7b-aaec9071b083-utilities\") pod \"84e595de-b3fa-404c-aa7b-aaec9071b083\" (UID: \"84e595de-b3fa-404c-aa7b-aaec9071b083\") " Sep 29 17:22:51 crc kubenswrapper[4592]: I0929 17:22:51.935138 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28ppn\" (UniqueName: \"kubernetes.io/projected/84e595de-b3fa-404c-aa7b-aaec9071b083-kube-api-access-28ppn\") pod \"84e595de-b3fa-404c-aa7b-aaec9071b083\" (UID: \"84e595de-b3fa-404c-aa7b-aaec9071b083\") " Sep 29 17:22:51 crc kubenswrapper[4592]: I0929 17:22:51.935905 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84e595de-b3fa-404c-aa7b-aaec9071b083-utilities" (OuterVolumeSpecName: "utilities") pod "84e595de-b3fa-404c-aa7b-aaec9071b083" (UID: "84e595de-b3fa-404c-aa7b-aaec9071b083"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:22:51 crc kubenswrapper[4592]: I0929 17:22:51.956079 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84e595de-b3fa-404c-aa7b-aaec9071b083-kube-api-access-28ppn" (OuterVolumeSpecName: "kube-api-access-28ppn") pod "84e595de-b3fa-404c-aa7b-aaec9071b083" (UID: "84e595de-b3fa-404c-aa7b-aaec9071b083"). InnerVolumeSpecName "kube-api-access-28ppn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:22:51 crc kubenswrapper[4592]: I0929 17:22:51.984777 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84e595de-b3fa-404c-aa7b-aaec9071b083-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84e595de-b3fa-404c-aa7b-aaec9071b083" (UID: "84e595de-b3fa-404c-aa7b-aaec9071b083"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.037916 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84e595de-b3fa-404c-aa7b-aaec9071b083-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.037957 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84e595de-b3fa-404c-aa7b-aaec9071b083-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.037994 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28ppn\" (UniqueName: \"kubernetes.io/projected/84e595de-b3fa-404c-aa7b-aaec9071b083-kube-api-access-28ppn\") on node \"crc\" DevicePath \"\"" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.248067 4592 generic.go:334] "Generic (PLEG): container finished" podID="84e595de-b3fa-404c-aa7b-aaec9071b083" containerID="edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7" exitCode=0 Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.248106 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lwjdt" event={"ID":"84e595de-b3fa-404c-aa7b-aaec9071b083","Type":"ContainerDied","Data":"edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7"} Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.248133 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lwjdt" event={"ID":"84e595de-b3fa-404c-aa7b-aaec9071b083","Type":"ContainerDied","Data":"d999ddc93914397e36dc5b3dca3ec0cf572b545c793d012c9ac789d51f823a88"} Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.248172 4592 scope.go:117] "RemoveContainer" containerID="edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.248182 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lwjdt" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.272478 4592 scope.go:117] "RemoveContainer" containerID="5c8b38b6dc05b85e20a9332a61ed133c6c3bc3e69ca48275f689122f5e04b900" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.295324 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lwjdt"] Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.314463 4592 scope.go:117] "RemoveContainer" containerID="e0a7ddb7588893a4d8f2731e179f22a003f60a464086de9179b1ffc2c1a7ca84" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.321957 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lwjdt"] Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.360914 4592 scope.go:117] "RemoveContainer" containerID="edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7" Sep 29 17:22:52 crc kubenswrapper[4592]: E0929 17:22:52.361435 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7\": container with ID starting with edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7 not found: ID does not exist" containerID="edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.361468 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7"} err="failed to get container status \"edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7\": rpc error: code = NotFound desc = could not find container \"edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7\": container with ID starting with edcca1456f683777bfcdb89da8e1f2c0ed5f4d51d12d9af0af0bc8597a89a1c7 not found: ID does not exist" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.361489 4592 scope.go:117] "RemoveContainer" containerID="5c8b38b6dc05b85e20a9332a61ed133c6c3bc3e69ca48275f689122f5e04b900" Sep 29 17:22:52 crc kubenswrapper[4592]: E0929 17:22:52.361851 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c8b38b6dc05b85e20a9332a61ed133c6c3bc3e69ca48275f689122f5e04b900\": container with ID starting with 5c8b38b6dc05b85e20a9332a61ed133c6c3bc3e69ca48275f689122f5e04b900 not found: ID does not exist" containerID="5c8b38b6dc05b85e20a9332a61ed133c6c3bc3e69ca48275f689122f5e04b900" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.361870 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c8b38b6dc05b85e20a9332a61ed133c6c3bc3e69ca48275f689122f5e04b900"} err="failed to get container status \"5c8b38b6dc05b85e20a9332a61ed133c6c3bc3e69ca48275f689122f5e04b900\": rpc error: code = NotFound desc = could not find container \"5c8b38b6dc05b85e20a9332a61ed133c6c3bc3e69ca48275f689122f5e04b900\": container with ID starting with 5c8b38b6dc05b85e20a9332a61ed133c6c3bc3e69ca48275f689122f5e04b900 not found: ID does not exist" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.361882 4592 scope.go:117] "RemoveContainer" containerID="e0a7ddb7588893a4d8f2731e179f22a003f60a464086de9179b1ffc2c1a7ca84" Sep 29 17:22:52 crc kubenswrapper[4592]: E0929 17:22:52.362303 4592 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e0a7ddb7588893a4d8f2731e179f22a003f60a464086de9179b1ffc2c1a7ca84\": container with ID starting with e0a7ddb7588893a4d8f2731e179f22a003f60a464086de9179b1ffc2c1a7ca84 not found: ID does not exist" containerID="e0a7ddb7588893a4d8f2731e179f22a003f60a464086de9179b1ffc2c1a7ca84" Sep 29 17:22:52 crc kubenswrapper[4592]: I0929 17:22:52.362328 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0a7ddb7588893a4d8f2731e179f22a003f60a464086de9179b1ffc2c1a7ca84"} err="failed to get container status \"e0a7ddb7588893a4d8f2731e179f22a003f60a464086de9179b1ffc2c1a7ca84\": rpc error: code = NotFound desc = could not find container \"e0a7ddb7588893a4d8f2731e179f22a003f60a464086de9179b1ffc2c1a7ca84\": container with ID starting with e0a7ddb7588893a4d8f2731e179f22a003f60a464086de9179b1ffc2c1a7ca84 not found: ID does not exist" Sep 29 17:22:53 crc kubenswrapper[4592]: I0929 17:22:53.197231 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84e595de-b3fa-404c-aa7b-aaec9071b083" path="/var/lib/kubelet/pods/84e595de-b3fa-404c-aa7b-aaec9071b083/volumes" Sep 29 17:23:04 crc kubenswrapper[4592]: I0929 17:23:04.183115 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:23:04 crc kubenswrapper[4592]: E0929 17:23:04.183904 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:23:08 crc kubenswrapper[4592]: I0929 17:23:08.407954 4592 generic.go:334] "Generic (PLEG): container finished" podID="8896fbe8-6b4f-41d8-a85c-88ea182d4cf6" containerID="39f493c65f91fd3defe2652041697395b4e0bf84874b4b0f243093800e5aaf72" exitCode=0 Sep 29 17:23:08 crc kubenswrapper[4592]: I0929 17:23:08.408024 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" event={"ID":"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6","Type":"ContainerDied","Data":"39f493c65f91fd3defe2652041697395b4e0bf84874b4b0f243093800e5aaf72"} Sep 29 17:23:09 crc kubenswrapper[4592]: I0929 17:23:09.840493 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:23:09 crc kubenswrapper[4592]: I0929 17:23:09.975810 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-ssh-key\") pod \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\" (UID: \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\") " Sep 29 17:23:09 crc kubenswrapper[4592]: I0929 17:23:09.976730 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bwt5\" (UniqueName: \"kubernetes.io/projected/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-kube-api-access-8bwt5\") pod \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\" (UID: \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\") " Sep 29 17:23:09 crc kubenswrapper[4592]: I0929 17:23:09.976855 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-inventory\") pod \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\" (UID: \"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6\") " Sep 29 17:23:09 crc kubenswrapper[4592]: I0929 17:23:09.986502 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-kube-api-access-8bwt5" (OuterVolumeSpecName: "kube-api-access-8bwt5") pod "8896fbe8-6b4f-41d8-a85c-88ea182d4cf6" (UID: "8896fbe8-6b4f-41d8-a85c-88ea182d4cf6"). InnerVolumeSpecName "kube-api-access-8bwt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.012268 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-inventory" (OuterVolumeSpecName: "inventory") pod "8896fbe8-6b4f-41d8-a85c-88ea182d4cf6" (UID: "8896fbe8-6b4f-41d8-a85c-88ea182d4cf6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.013386 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8896fbe8-6b4f-41d8-a85c-88ea182d4cf6" (UID: "8896fbe8-6b4f-41d8-a85c-88ea182d4cf6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.079602 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bwt5\" (UniqueName: \"kubernetes.io/projected/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-kube-api-access-8bwt5\") on node \"crc\" DevicePath \"\"" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.079636 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.079644 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8896fbe8-6b4f-41d8-a85c-88ea182d4cf6-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.429585 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" event={"ID":"8896fbe8-6b4f-41d8-a85c-88ea182d4cf6","Type":"ContainerDied","Data":"eadfdd301c9a09fb1378c171744b1e2b1ee364ee6db3716b3e554fe4d31fe37c"} Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.429619 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eadfdd301c9a09fb1378c171744b1e2b1ee364ee6db3716b3e554fe4d31fe37c" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.429664 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-79dcn" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.530405 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq"] Sep 29 17:23:10 crc kubenswrapper[4592]: E0929 17:23:10.530783 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8896fbe8-6b4f-41d8-a85c-88ea182d4cf6" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.530801 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8896fbe8-6b4f-41d8-a85c-88ea182d4cf6" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 17:23:10 crc kubenswrapper[4592]: E0929 17:23:10.530814 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84e595de-b3fa-404c-aa7b-aaec9071b083" containerName="extract-content" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.530819 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="84e595de-b3fa-404c-aa7b-aaec9071b083" containerName="extract-content" Sep 29 17:23:10 crc kubenswrapper[4592]: E0929 17:23:10.530839 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84e595de-b3fa-404c-aa7b-aaec9071b083" containerName="extract-utilities" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.530846 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="84e595de-b3fa-404c-aa7b-aaec9071b083" containerName="extract-utilities" Sep 29 17:23:10 crc kubenswrapper[4592]: E0929 17:23:10.530857 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84e595de-b3fa-404c-aa7b-aaec9071b083" containerName="registry-server" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.530862 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="84e595de-b3fa-404c-aa7b-aaec9071b083" containerName="registry-server" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.531026 4592 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="84e595de-b3fa-404c-aa7b-aaec9071b083" containerName="registry-server" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.531039 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8896fbe8-6b4f-41d8-a85c-88ea182d4cf6" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.531655 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.535717 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.535967 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.536225 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.536343 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.545496 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq"] Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.694309 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/33421f74-e3cd-4318-b751-ed324d225253-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq\" (UID: \"33421f74-e3cd-4318-b751-ed324d225253\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.694474 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmg6s\" (UniqueName: \"kubernetes.io/projected/33421f74-e3cd-4318-b751-ed324d225253-kube-api-access-pmg6s\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq\" (UID: \"33421f74-e3cd-4318-b751-ed324d225253\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.694526 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/33421f74-e3cd-4318-b751-ed324d225253-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq\" (UID: \"33421f74-e3cd-4318-b751-ed324d225253\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.796338 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmg6s\" (UniqueName: \"kubernetes.io/projected/33421f74-e3cd-4318-b751-ed324d225253-kube-api-access-pmg6s\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq\" (UID: \"33421f74-e3cd-4318-b751-ed324d225253\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.796734 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/33421f74-e3cd-4318-b751-ed324d225253-ssh-key\") pod 
\"configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq\" (UID: \"33421f74-e3cd-4318-b751-ed324d225253\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.797007 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/33421f74-e3cd-4318-b751-ed324d225253-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq\" (UID: \"33421f74-e3cd-4318-b751-ed324d225253\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.804628 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/33421f74-e3cd-4318-b751-ed324d225253-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq\" (UID: \"33421f74-e3cd-4318-b751-ed324d225253\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.805683 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/33421f74-e3cd-4318-b751-ed324d225253-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq\" (UID: \"33421f74-e3cd-4318-b751-ed324d225253\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.825365 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmg6s\" (UniqueName: \"kubernetes.io/projected/33421f74-e3cd-4318-b751-ed324d225253-kube-api-access-pmg6s\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq\" (UID: \"33421f74-e3cd-4318-b751-ed324d225253\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:23:10 crc kubenswrapper[4592]: I0929 17:23:10.849451 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:23:11 crc kubenswrapper[4592]: I0929 17:23:11.327919 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq"] Sep 29 17:23:11 crc kubenswrapper[4592]: I0929 17:23:11.446170 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" event={"ID":"33421f74-e3cd-4318-b751-ed324d225253","Type":"ContainerStarted","Data":"76b7433b05c8454527444736c248e6f2d774786e6db2ee417d1c12f6c257eccb"} Sep 29 17:23:11 crc kubenswrapper[4592]: I0929 17:23:11.788858 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:23:12 crc kubenswrapper[4592]: I0929 17:23:12.456842 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" event={"ID":"33421f74-e3cd-4318-b751-ed324d225253","Type":"ContainerStarted","Data":"f22367fc92c2686b2151e9aa5f786f60fdc1fc323a7fdf91b9534913f22fcc04"} Sep 29 17:23:12 crc kubenswrapper[4592]: I0929 17:23:12.470703 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" podStartSLOduration=2.020992142 podStartE2EDuration="2.470683227s" podCreationTimestamp="2025-09-29 17:23:10 +0000 UTC" firstStartedPulling="2025-09-29 17:23:11.336588999 +0000 UTC m=+1921.484366680" lastFinishedPulling="2025-09-29 17:23:11.786280064 +0000 UTC m=+1921.934057765" observedRunningTime="2025-09-29 17:23:12.46898103 +0000 UTC m=+1922.616758731" watchObservedRunningTime="2025-09-29 17:23:12.470683227 +0000 UTC m=+1922.618460908" Sep 29 17:23:18 crc kubenswrapper[4592]: I0929 17:23:18.183361 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:23:18 crc kubenswrapper[4592]: E0929 17:23:18.183885 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:23:21 crc kubenswrapper[4592]: I0929 17:23:21.746589 4592 scope.go:117] "RemoveContainer" containerID="96d505e5f0c53c8ca54d8a0fc72ef170f89636e7265d441f402abbfb1430dbd8" Sep 29 17:23:21 crc kubenswrapper[4592]: I0929 17:23:21.790512 4592 scope.go:117] "RemoveContainer" containerID="f52982daad45aae0770d61f69be5335ba029b80378ba5c632387ba50df28b73c" Sep 29 17:23:25 crc kubenswrapper[4592]: I0929 17:23:25.057447 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-m6cdw"] Sep 29 17:23:25 crc kubenswrapper[4592]: I0929 17:23:25.067124 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-m6cdw"] Sep 29 17:23:25 crc kubenswrapper[4592]: I0929 17:23:25.193373 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2da5133b-922b-406d-8895-ef8b6c3907f9" path="/var/lib/kubelet/pods/2da5133b-922b-406d-8895-ef8b6c3907f9/volumes" Sep 29 17:23:32 crc kubenswrapper[4592]: I0929 17:23:32.183478 4592 scope.go:117] "RemoveContainer" 
containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:23:32 crc kubenswrapper[4592]: E0929 17:23:32.184634 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:23:44 crc kubenswrapper[4592]: I0929 17:23:44.184142 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:23:44 crc kubenswrapper[4592]: E0929 17:23:44.185302 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:23:56 crc kubenswrapper[4592]: I0929 17:23:56.183878 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:23:56 crc kubenswrapper[4592]: E0929 17:23:56.184737 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:24:05 crc kubenswrapper[4592]: I0929 17:24:05.955695 4592 generic.go:334] "Generic (PLEG): container finished" podID="33421f74-e3cd-4318-b751-ed324d225253" containerID="f22367fc92c2686b2151e9aa5f786f60fdc1fc323a7fdf91b9534913f22fcc04" exitCode=0 Sep 29 17:24:05 crc kubenswrapper[4592]: I0929 17:24:05.955785 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" event={"ID":"33421f74-e3cd-4318-b751-ed324d225253","Type":"ContainerDied","Data":"f22367fc92c2686b2151e9aa5f786f60fdc1fc323a7fdf91b9534913f22fcc04"} Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.359647 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.502814 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmg6s\" (UniqueName: \"kubernetes.io/projected/33421f74-e3cd-4318-b751-ed324d225253-kube-api-access-pmg6s\") pod \"33421f74-e3cd-4318-b751-ed324d225253\" (UID: \"33421f74-e3cd-4318-b751-ed324d225253\") " Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.502951 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/33421f74-e3cd-4318-b751-ed324d225253-ssh-key\") pod \"33421f74-e3cd-4318-b751-ed324d225253\" (UID: \"33421f74-e3cd-4318-b751-ed324d225253\") " Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.502977 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/33421f74-e3cd-4318-b751-ed324d225253-inventory\") pod \"33421f74-e3cd-4318-b751-ed324d225253\" (UID: \"33421f74-e3cd-4318-b751-ed324d225253\") " Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.517529 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33421f74-e3cd-4318-b751-ed324d225253-kube-api-access-pmg6s" (OuterVolumeSpecName: "kube-api-access-pmg6s") pod "33421f74-e3cd-4318-b751-ed324d225253" (UID: "33421f74-e3cd-4318-b751-ed324d225253"). InnerVolumeSpecName "kube-api-access-pmg6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.532256 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33421f74-e3cd-4318-b751-ed324d225253-inventory" (OuterVolumeSpecName: "inventory") pod "33421f74-e3cd-4318-b751-ed324d225253" (UID: "33421f74-e3cd-4318-b751-ed324d225253"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.547813 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33421f74-e3cd-4318-b751-ed324d225253-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "33421f74-e3cd-4318-b751-ed324d225253" (UID: "33421f74-e3cd-4318-b751-ed324d225253"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.605752 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmg6s\" (UniqueName: \"kubernetes.io/projected/33421f74-e3cd-4318-b751-ed324d225253-kube-api-access-pmg6s\") on node \"crc\" DevicePath \"\"" Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.605791 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/33421f74-e3cd-4318-b751-ed324d225253-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.605822 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/33421f74-e3cd-4318-b751-ed324d225253-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.986193 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" event={"ID":"33421f74-e3cd-4318-b751-ed324d225253","Type":"ContainerDied","Data":"76b7433b05c8454527444736c248e6f2d774786e6db2ee417d1c12f6c257eccb"} Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.986270 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq" Sep 29 17:24:07 crc kubenswrapper[4592]: I0929 17:24:07.986271 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76b7433b05c8454527444736c248e6f2d774786e6db2ee417d1c12f6c257eccb" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.114256 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-2nfwh"] Sep 29 17:24:08 crc kubenswrapper[4592]: E0929 17:24:08.115412 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33421f74-e3cd-4318-b751-ed324d225253" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.115548 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="33421f74-e3cd-4318-b751-ed324d225253" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.115884 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="33421f74-e3cd-4318-b751-ed324d225253" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.116913 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.120597 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.120604 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.120912 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.121199 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.125074 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/af7efc5b-eb67-4660-92ae-77d6efa85b0f-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-2nfwh\" (UID: \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\") " pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.125168 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/af7efc5b-eb67-4660-92ae-77d6efa85b0f-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-2nfwh\" (UID: \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\") " pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.125358 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6v57\" (UniqueName: \"kubernetes.io/projected/af7efc5b-eb67-4660-92ae-77d6efa85b0f-kube-api-access-b6v57\") pod \"ssh-known-hosts-edpm-deployment-2nfwh\" (UID: \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\") " pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.126266 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-2nfwh"] Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.227076 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6v57\" (UniqueName: \"kubernetes.io/projected/af7efc5b-eb67-4660-92ae-77d6efa85b0f-kube-api-access-b6v57\") pod \"ssh-known-hosts-edpm-deployment-2nfwh\" (UID: \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\") " pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.227242 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/af7efc5b-eb67-4660-92ae-77d6efa85b0f-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-2nfwh\" (UID: \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\") " pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.227274 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/af7efc5b-eb67-4660-92ae-77d6efa85b0f-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-2nfwh\" (UID: \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\") " pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:08 crc 
kubenswrapper[4592]: I0929 17:24:08.239370 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/af7efc5b-eb67-4660-92ae-77d6efa85b0f-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-2nfwh\" (UID: \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\") " pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.243424 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/af7efc5b-eb67-4660-92ae-77d6efa85b0f-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-2nfwh\" (UID: \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\") " pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.246968 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6v57\" (UniqueName: \"kubernetes.io/projected/af7efc5b-eb67-4660-92ae-77d6efa85b0f-kube-api-access-b6v57\") pod \"ssh-known-hosts-edpm-deployment-2nfwh\" (UID: \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\") " pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:08 crc kubenswrapper[4592]: I0929 17:24:08.440350 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:09 crc kubenswrapper[4592]: I0929 17:24:09.069474 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-2nfwh"] Sep 29 17:24:09 crc kubenswrapper[4592]: I0929 17:24:09.078716 4592 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 17:24:10 crc kubenswrapper[4592]: I0929 17:24:10.008212 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" event={"ID":"af7efc5b-eb67-4660-92ae-77d6efa85b0f","Type":"ContainerStarted","Data":"cc9245e8a1f2e4cf80e19c42f4064f1ad82bdda8f977fc23d4e4025f7ae31e60"} Sep 29 17:24:10 crc kubenswrapper[4592]: I0929 17:24:10.008563 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" event={"ID":"af7efc5b-eb67-4660-92ae-77d6efa85b0f","Type":"ContainerStarted","Data":"08e28e6b6c9d0b90b303b7d9646a8db5df41f5505bc14214cee5d7439fbccaa5"} Sep 29 17:24:10 crc kubenswrapper[4592]: I0929 17:24:10.185311 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:24:10 crc kubenswrapper[4592]: E0929 17:24:10.185587 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:24:17 crc kubenswrapper[4592]: I0929 17:24:17.076636 4592 generic.go:334] "Generic (PLEG): container finished" podID="af7efc5b-eb67-4660-92ae-77d6efa85b0f" containerID="cc9245e8a1f2e4cf80e19c42f4064f1ad82bdda8f977fc23d4e4025f7ae31e60" exitCode=0 Sep 29 17:24:17 crc kubenswrapper[4592]: I0929 17:24:17.076763 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" 
event={"ID":"af7efc5b-eb67-4660-92ae-77d6efa85b0f","Type":"ContainerDied","Data":"cc9245e8a1f2e4cf80e19c42f4064f1ad82bdda8f977fc23d4e4025f7ae31e60"} Sep 29 17:24:18 crc kubenswrapper[4592]: I0929 17:24:18.512745 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:18 crc kubenswrapper[4592]: I0929 17:24:18.549870 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/af7efc5b-eb67-4660-92ae-77d6efa85b0f-inventory-0\") pod \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\" (UID: \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\") " Sep 29 17:24:18 crc kubenswrapper[4592]: I0929 17:24:18.549921 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6v57\" (UniqueName: \"kubernetes.io/projected/af7efc5b-eb67-4660-92ae-77d6efa85b0f-kube-api-access-b6v57\") pod \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\" (UID: \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\") " Sep 29 17:24:18 crc kubenswrapper[4592]: I0929 17:24:18.550020 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/af7efc5b-eb67-4660-92ae-77d6efa85b0f-ssh-key-openstack-edpm-ipam\") pod \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\" (UID: \"af7efc5b-eb67-4660-92ae-77d6efa85b0f\") " Sep 29 17:24:18 crc kubenswrapper[4592]: I0929 17:24:18.559391 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af7efc5b-eb67-4660-92ae-77d6efa85b0f-kube-api-access-b6v57" (OuterVolumeSpecName: "kube-api-access-b6v57") pod "af7efc5b-eb67-4660-92ae-77d6efa85b0f" (UID: "af7efc5b-eb67-4660-92ae-77d6efa85b0f"). InnerVolumeSpecName "kube-api-access-b6v57". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:24:18 crc kubenswrapper[4592]: I0929 17:24:18.584645 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af7efc5b-eb67-4660-92ae-77d6efa85b0f-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "af7efc5b-eb67-4660-92ae-77d6efa85b0f" (UID: "af7efc5b-eb67-4660-92ae-77d6efa85b0f"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:24:18 crc kubenswrapper[4592]: I0929 17:24:18.590374 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af7efc5b-eb67-4660-92ae-77d6efa85b0f-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "af7efc5b-eb67-4660-92ae-77d6efa85b0f" (UID: "af7efc5b-eb67-4660-92ae-77d6efa85b0f"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:24:18 crc kubenswrapper[4592]: I0929 17:24:18.652388 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/af7efc5b-eb67-4660-92ae-77d6efa85b0f-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Sep 29 17:24:18 crc kubenswrapper[4592]: I0929 17:24:18.652417 4592 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/af7efc5b-eb67-4660-92ae-77d6efa85b0f-inventory-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:24:18 crc kubenswrapper[4592]: I0929 17:24:18.652426 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6v57\" (UniqueName: \"kubernetes.io/projected/af7efc5b-eb67-4660-92ae-77d6efa85b0f-kube-api-access-b6v57\") on node \"crc\" DevicePath \"\"" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.097359 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" event={"ID":"af7efc5b-eb67-4660-92ae-77d6efa85b0f","Type":"ContainerDied","Data":"08e28e6b6c9d0b90b303b7d9646a8db5df41f5505bc14214cee5d7439fbccaa5"} Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.097423 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08e28e6b6c9d0b90b303b7d9646a8db5df41f5505bc14214cee5d7439fbccaa5" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.097512 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-2nfwh" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.205383 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2"] Sep 29 17:24:19 crc kubenswrapper[4592]: E0929 17:24:19.206098 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af7efc5b-eb67-4660-92ae-77d6efa85b0f" containerName="ssh-known-hosts-edpm-deployment" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.206128 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="af7efc5b-eb67-4660-92ae-77d6efa85b0f" containerName="ssh-known-hosts-edpm-deployment" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.206502 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="af7efc5b-eb67-4660-92ae-77d6efa85b0f" containerName="ssh-known-hosts-edpm-deployment" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.207640 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2"] Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.207800 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.212556 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.212644 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.222491 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.222601 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.265462 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g75c2\" (UniqueName: \"kubernetes.io/projected/6a007423-0554-48b3-b38a-d23f2509aacd-kube-api-access-g75c2\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gvrb2\" (UID: \"6a007423-0554-48b3-b38a-d23f2509aacd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.265584 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a007423-0554-48b3-b38a-d23f2509aacd-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gvrb2\" (UID: \"6a007423-0554-48b3-b38a-d23f2509aacd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.265701 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a007423-0554-48b3-b38a-d23f2509aacd-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gvrb2\" (UID: \"6a007423-0554-48b3-b38a-d23f2509aacd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.367763 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g75c2\" (UniqueName: \"kubernetes.io/projected/6a007423-0554-48b3-b38a-d23f2509aacd-kube-api-access-g75c2\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gvrb2\" (UID: \"6a007423-0554-48b3-b38a-d23f2509aacd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.367937 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a007423-0554-48b3-b38a-d23f2509aacd-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gvrb2\" (UID: \"6a007423-0554-48b3-b38a-d23f2509aacd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.368058 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a007423-0554-48b3-b38a-d23f2509aacd-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gvrb2\" (UID: \"6a007423-0554-48b3-b38a-d23f2509aacd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.373695 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a007423-0554-48b3-b38a-d23f2509aacd-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gvrb2\" (UID: \"6a007423-0554-48b3-b38a-d23f2509aacd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.373738 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a007423-0554-48b3-b38a-d23f2509aacd-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gvrb2\" (UID: \"6a007423-0554-48b3-b38a-d23f2509aacd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.384453 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g75c2\" (UniqueName: \"kubernetes.io/projected/6a007423-0554-48b3-b38a-d23f2509aacd-kube-api-access-g75c2\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-gvrb2\" (UID: \"6a007423-0554-48b3-b38a-d23f2509aacd\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:19 crc kubenswrapper[4592]: I0929 17:24:19.527370 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:20 crc kubenswrapper[4592]: I0929 17:24:20.091567 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2"] Sep 29 17:24:20 crc kubenswrapper[4592]: W0929 17:24:20.096703 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a007423_0554_48b3_b38a_d23f2509aacd.slice/crio-ccd7ef0cccad51fd54d4c29517e05756900a0168882852c41e9bf553aa2983b3 WatchSource:0}: Error finding container ccd7ef0cccad51fd54d4c29517e05756900a0168882852c41e9bf553aa2983b3: Status 404 returned error can't find the container with id ccd7ef0cccad51fd54d4c29517e05756900a0168882852c41e9bf553aa2983b3 Sep 29 17:24:20 crc kubenswrapper[4592]: I0929 17:24:20.106923 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" event={"ID":"6a007423-0554-48b3-b38a-d23f2509aacd","Type":"ContainerStarted","Data":"ccd7ef0cccad51fd54d4c29517e05756900a0168882852c41e9bf553aa2983b3"} Sep 29 17:24:21 crc kubenswrapper[4592]: I0929 17:24:21.122596 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" event={"ID":"6a007423-0554-48b3-b38a-d23f2509aacd","Type":"ContainerStarted","Data":"599e722c37ba24f683c3942461e2a404ea0ffe08c69cef1114bf2ff41734c61d"} Sep 29 17:24:21 crc kubenswrapper[4592]: I0929 17:24:21.149626 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" podStartSLOduration=1.441837212 podStartE2EDuration="2.149605682s" podCreationTimestamp="2025-09-29 17:24:19 +0000 UTC" firstStartedPulling="2025-09-29 17:24:20.099494066 +0000 UTC m=+1990.247271757" lastFinishedPulling="2025-09-29 17:24:20.807262546 +0000 UTC m=+1990.955040227" observedRunningTime="2025-09-29 17:24:21.141797699 +0000 UTC m=+1991.289575380" watchObservedRunningTime="2025-09-29 17:24:21.149605682 +0000 UTC m=+1991.297383383" Sep 29 17:24:21 crc kubenswrapper[4592]: I0929 17:24:21.182665 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:24:21 crc 
kubenswrapper[4592]: E0929 17:24:21.182951 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:24:21 crc kubenswrapper[4592]: I0929 17:24:21.892800 4592 scope.go:117] "RemoveContainer" containerID="4f61a4ef10488b20c9096ca3b9ddfa329bd291837f3e6de80a5ac89519252ece" Sep 29 17:24:30 crc kubenswrapper[4592]: I0929 17:24:30.201537 4592 generic.go:334] "Generic (PLEG): container finished" podID="6a007423-0554-48b3-b38a-d23f2509aacd" containerID="599e722c37ba24f683c3942461e2a404ea0ffe08c69cef1114bf2ff41734c61d" exitCode=0 Sep 29 17:24:30 crc kubenswrapper[4592]: I0929 17:24:30.202317 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" event={"ID":"6a007423-0554-48b3-b38a-d23f2509aacd","Type":"ContainerDied","Data":"599e722c37ba24f683c3942461e2a404ea0ffe08c69cef1114bf2ff41734c61d"} Sep 29 17:24:31 crc kubenswrapper[4592]: I0929 17:24:31.698837 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:31 crc kubenswrapper[4592]: I0929 17:24:31.809871 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a007423-0554-48b3-b38a-d23f2509aacd-inventory\") pod \"6a007423-0554-48b3-b38a-d23f2509aacd\" (UID: \"6a007423-0554-48b3-b38a-d23f2509aacd\") " Sep 29 17:24:31 crc kubenswrapper[4592]: I0929 17:24:31.809960 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a007423-0554-48b3-b38a-d23f2509aacd-ssh-key\") pod \"6a007423-0554-48b3-b38a-d23f2509aacd\" (UID: \"6a007423-0554-48b3-b38a-d23f2509aacd\") " Sep 29 17:24:31 crc kubenswrapper[4592]: I0929 17:24:31.810070 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g75c2\" (UniqueName: \"kubernetes.io/projected/6a007423-0554-48b3-b38a-d23f2509aacd-kube-api-access-g75c2\") pod \"6a007423-0554-48b3-b38a-d23f2509aacd\" (UID: \"6a007423-0554-48b3-b38a-d23f2509aacd\") " Sep 29 17:24:31 crc kubenswrapper[4592]: I0929 17:24:31.815441 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a007423-0554-48b3-b38a-d23f2509aacd-kube-api-access-g75c2" (OuterVolumeSpecName: "kube-api-access-g75c2") pod "6a007423-0554-48b3-b38a-d23f2509aacd" (UID: "6a007423-0554-48b3-b38a-d23f2509aacd"). InnerVolumeSpecName "kube-api-access-g75c2". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:24:31 crc kubenswrapper[4592]: I0929 17:24:31.842951 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a007423-0554-48b3-b38a-d23f2509aacd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6a007423-0554-48b3-b38a-d23f2509aacd" (UID: "6a007423-0554-48b3-b38a-d23f2509aacd"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:24:31 crc kubenswrapper[4592]: I0929 17:24:31.857261 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a007423-0554-48b3-b38a-d23f2509aacd-inventory" (OuterVolumeSpecName: "inventory") pod "6a007423-0554-48b3-b38a-d23f2509aacd" (UID: "6a007423-0554-48b3-b38a-d23f2509aacd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:24:31 crc kubenswrapper[4592]: I0929 17:24:31.912437 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6a007423-0554-48b3-b38a-d23f2509aacd-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:24:31 crc kubenswrapper[4592]: I0929 17:24:31.912725 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6a007423-0554-48b3-b38a-d23f2509aacd-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:24:31 crc kubenswrapper[4592]: I0929 17:24:31.912735 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g75c2\" (UniqueName: \"kubernetes.io/projected/6a007423-0554-48b3-b38a-d23f2509aacd-kube-api-access-g75c2\") on node \"crc\" DevicePath \"\"" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.220383 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" event={"ID":"6a007423-0554-48b3-b38a-d23f2509aacd","Type":"ContainerDied","Data":"ccd7ef0cccad51fd54d4c29517e05756900a0168882852c41e9bf553aa2983b3"} Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.220612 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ccd7ef0cccad51fd54d4c29517e05756900a0168882852c41e9bf553aa2983b3" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.220448 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-gvrb2" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.463996 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9"] Sep 29 17:24:32 crc kubenswrapper[4592]: E0929 17:24:32.465131 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a007423-0554-48b3-b38a-d23f2509aacd" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.465178 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a007423-0554-48b3-b38a-d23f2509aacd" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.465809 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a007423-0554-48b3-b38a-d23f2509aacd" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.467025 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.471358 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.471601 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.471692 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.475602 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.476348 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9"] Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.631667 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzl5v\" (UniqueName: \"kubernetes.io/projected/fffd0339-970b-41b0-b868-de31bfdc29b0-kube-api-access-lzl5v\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9\" (UID: \"fffd0339-970b-41b0-b868-de31bfdc29b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.631834 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fffd0339-970b-41b0-b868-de31bfdc29b0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9\" (UID: \"fffd0339-970b-41b0-b868-de31bfdc29b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.632013 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fffd0339-970b-41b0-b868-de31bfdc29b0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9\" (UID: \"fffd0339-970b-41b0-b868-de31bfdc29b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.733560 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzl5v\" (UniqueName: \"kubernetes.io/projected/fffd0339-970b-41b0-b868-de31bfdc29b0-kube-api-access-lzl5v\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9\" (UID: \"fffd0339-970b-41b0-b868-de31bfdc29b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.733644 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fffd0339-970b-41b0-b868-de31bfdc29b0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9\" (UID: \"fffd0339-970b-41b0-b868-de31bfdc29b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.733707 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fffd0339-970b-41b0-b868-de31bfdc29b0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9\" (UID: 
\"fffd0339-970b-41b0-b868-de31bfdc29b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.737966 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fffd0339-970b-41b0-b868-de31bfdc29b0-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9\" (UID: \"fffd0339-970b-41b0-b868-de31bfdc29b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.738996 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fffd0339-970b-41b0-b868-de31bfdc29b0-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9\" (UID: \"fffd0339-970b-41b0-b868-de31bfdc29b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.758430 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzl5v\" (UniqueName: \"kubernetes.io/projected/fffd0339-970b-41b0-b868-de31bfdc29b0-kube-api-access-lzl5v\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9\" (UID: \"fffd0339-970b-41b0-b868-de31bfdc29b0\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:32 crc kubenswrapper[4592]: I0929 17:24:32.799346 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:33 crc kubenswrapper[4592]: I0929 17:24:33.183594 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:24:33 crc kubenswrapper[4592]: W0929 17:24:33.401051 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfffd0339_970b_41b0_b868_de31bfdc29b0.slice/crio-a55d202391c17edfdb2c1ff1ed82272e8becf78b67f7c30005a29561852b3a1b WatchSource:0}: Error finding container a55d202391c17edfdb2c1ff1ed82272e8becf78b67f7c30005a29561852b3a1b: Status 404 returned error can't find the container with id a55d202391c17edfdb2c1ff1ed82272e8becf78b67f7c30005a29561852b3a1b Sep 29 17:24:33 crc kubenswrapper[4592]: I0929 17:24:33.404954 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9"] Sep 29 17:24:34 crc kubenswrapper[4592]: I0929 17:24:34.245209 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"40b13a263d719db9fdf3ca3ac6947962b18e9912d31484ebaf596f663dccdd74"} Sep 29 17:24:34 crc kubenswrapper[4592]: I0929 17:24:34.257731 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" event={"ID":"fffd0339-970b-41b0-b868-de31bfdc29b0","Type":"ContainerStarted","Data":"86fc482b5af6a5f33c3d2f90a735206b4c5ef1d807fdfa46f058eb8fdec63289"} Sep 29 17:24:34 crc kubenswrapper[4592]: I0929 17:24:34.257780 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" event={"ID":"fffd0339-970b-41b0-b868-de31bfdc29b0","Type":"ContainerStarted","Data":"a55d202391c17edfdb2c1ff1ed82272e8becf78b67f7c30005a29561852b3a1b"} Sep 29 17:24:34 crc kubenswrapper[4592]: I0929 
17:24:34.298831 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" podStartSLOduration=1.778658729 podStartE2EDuration="2.298788527s" podCreationTimestamp="2025-09-29 17:24:32 +0000 UTC" firstStartedPulling="2025-09-29 17:24:33.404186977 +0000 UTC m=+2003.551964658" lastFinishedPulling="2025-09-29 17:24:33.924316785 +0000 UTC m=+2004.072094456" observedRunningTime="2025-09-29 17:24:34.293252686 +0000 UTC m=+2004.441030377" watchObservedRunningTime="2025-09-29 17:24:34.298788527 +0000 UTC m=+2004.446566218" Sep 29 17:24:45 crc kubenswrapper[4592]: I0929 17:24:45.351576 4592 generic.go:334] "Generic (PLEG): container finished" podID="fffd0339-970b-41b0-b868-de31bfdc29b0" containerID="86fc482b5af6a5f33c3d2f90a735206b4c5ef1d807fdfa46f058eb8fdec63289" exitCode=0 Sep 29 17:24:45 crc kubenswrapper[4592]: I0929 17:24:45.351963 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" event={"ID":"fffd0339-970b-41b0-b868-de31bfdc29b0","Type":"ContainerDied","Data":"86fc482b5af6a5f33c3d2f90a735206b4c5ef1d807fdfa46f058eb8fdec63289"} Sep 29 17:24:46 crc kubenswrapper[4592]: I0929 17:24:46.750215 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:46 crc kubenswrapper[4592]: I0929 17:24:46.833434 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fffd0339-970b-41b0-b868-de31bfdc29b0-ssh-key\") pod \"fffd0339-970b-41b0-b868-de31bfdc29b0\" (UID: \"fffd0339-970b-41b0-b868-de31bfdc29b0\") " Sep 29 17:24:46 crc kubenswrapper[4592]: I0929 17:24:46.833818 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzl5v\" (UniqueName: \"kubernetes.io/projected/fffd0339-970b-41b0-b868-de31bfdc29b0-kube-api-access-lzl5v\") pod \"fffd0339-970b-41b0-b868-de31bfdc29b0\" (UID: \"fffd0339-970b-41b0-b868-de31bfdc29b0\") " Sep 29 17:24:46 crc kubenswrapper[4592]: I0929 17:24:46.833926 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fffd0339-970b-41b0-b868-de31bfdc29b0-inventory\") pod \"fffd0339-970b-41b0-b868-de31bfdc29b0\" (UID: \"fffd0339-970b-41b0-b868-de31bfdc29b0\") " Sep 29 17:24:46 crc kubenswrapper[4592]: I0929 17:24:46.840396 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fffd0339-970b-41b0-b868-de31bfdc29b0-kube-api-access-lzl5v" (OuterVolumeSpecName: "kube-api-access-lzl5v") pod "fffd0339-970b-41b0-b868-de31bfdc29b0" (UID: "fffd0339-970b-41b0-b868-de31bfdc29b0"). InnerVolumeSpecName "kube-api-access-lzl5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:24:46 crc kubenswrapper[4592]: I0929 17:24:46.866002 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fffd0339-970b-41b0-b868-de31bfdc29b0-inventory" (OuterVolumeSpecName: "inventory") pod "fffd0339-970b-41b0-b868-de31bfdc29b0" (UID: "fffd0339-970b-41b0-b868-de31bfdc29b0"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:24:46 crc kubenswrapper[4592]: I0929 17:24:46.867830 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fffd0339-970b-41b0-b868-de31bfdc29b0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fffd0339-970b-41b0-b868-de31bfdc29b0" (UID: "fffd0339-970b-41b0-b868-de31bfdc29b0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:24:46 crc kubenswrapper[4592]: I0929 17:24:46.936300 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fffd0339-970b-41b0-b868-de31bfdc29b0-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:24:46 crc kubenswrapper[4592]: I0929 17:24:46.936333 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzl5v\" (UniqueName: \"kubernetes.io/projected/fffd0339-970b-41b0-b868-de31bfdc29b0-kube-api-access-lzl5v\") on node \"crc\" DevicePath \"\"" Sep 29 17:24:46 crc kubenswrapper[4592]: I0929 17:24:46.936344 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fffd0339-970b-41b0-b868-de31bfdc29b0-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.371545 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" event={"ID":"fffd0339-970b-41b0-b868-de31bfdc29b0","Type":"ContainerDied","Data":"a55d202391c17edfdb2c1ff1ed82272e8becf78b67f7c30005a29561852b3a1b"} Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.371837 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a55d202391c17edfdb2c1ff1ed82272e8becf78b67f7c30005a29561852b3a1b" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.371630 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.551876 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p"] Sep 29 17:24:47 crc kubenswrapper[4592]: E0929 17:24:47.552468 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fffd0339-970b-41b0-b868-de31bfdc29b0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.552500 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="fffd0339-970b-41b0-b868-de31bfdc29b0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.552796 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="fffd0339-970b-41b0-b868-de31bfdc29b0" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.553714 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.561371 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.561753 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.561922 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.562512 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.562661 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.562907 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.563133 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.564452 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.573249 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p"] Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656089 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656218 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656296 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wz89\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-kube-api-access-5wz89\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656351 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656392 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656454 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656484 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656516 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656549 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656580 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656610 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656643 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656683 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.656763 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758326 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758386 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758423 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758446 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758465 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758488 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758510 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758534 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758559 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758589 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758646 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758673 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758697 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.758716 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wz89\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-kube-api-access-5wz89\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.762747 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.763614 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.764868 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.765066 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.765224 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") 
" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.765859 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.766451 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.766976 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.767193 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.767954 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.769660 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.774609 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.775880 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wz89\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-kube-api-access-5wz89\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.781867 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:47 crc kubenswrapper[4592]: I0929 17:24:47.868895 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:24:48 crc kubenswrapper[4592]: I0929 17:24:48.377140 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p"] Sep 29 17:24:48 crc kubenswrapper[4592]: W0929 17:24:48.382375 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b5035f5_4d62_4661_8067_869b1e54997e.slice/crio-665b17b2476f8bdd3fbdd42b2ca17cfa035e693e1fce152e16beaa483f6b2400 WatchSource:0}: Error finding container 665b17b2476f8bdd3fbdd42b2ca17cfa035e693e1fce152e16beaa483f6b2400: Status 404 returned error can't find the container with id 665b17b2476f8bdd3fbdd42b2ca17cfa035e693e1fce152e16beaa483f6b2400 Sep 29 17:24:49 crc kubenswrapper[4592]: I0929 17:24:49.392556 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" event={"ID":"3b5035f5-4d62-4661-8067-869b1e54997e","Type":"ContainerStarted","Data":"c9052a10a186f3bf20e93daf7ec56288ebfd78efe83c07077765105a6a27d8b9"} Sep 29 17:24:49 crc kubenswrapper[4592]: I0929 17:24:49.392944 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" event={"ID":"3b5035f5-4d62-4661-8067-869b1e54997e","Type":"ContainerStarted","Data":"665b17b2476f8bdd3fbdd42b2ca17cfa035e693e1fce152e16beaa483f6b2400"} Sep 29 17:24:49 crc kubenswrapper[4592]: I0929 17:24:49.419091 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" podStartSLOduration=1.8965675050000002 podStartE2EDuration="2.419075664s" podCreationTimestamp="2025-09-29 17:24:47 +0000 UTC" firstStartedPulling="2025-09-29 17:24:48.383613666 +0000 UTC m=+2018.531391347" lastFinishedPulling="2025-09-29 17:24:48.906121825 +0000 UTC m=+2019.053899506" observedRunningTime="2025-09-29 17:24:49.411808226 +0000 UTC m=+2019.559585917" watchObservedRunningTime="2025-09-29 17:24:49.419075664 +0000 UTC m=+2019.566853345" Sep 29 17:25:23 crc kubenswrapper[4592]: I0929 17:25:23.738394 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-sxll2"] Sep 29 17:25:23 crc kubenswrapper[4592]: I0929 17:25:23.742315 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:23 crc kubenswrapper[4592]: I0929 17:25:23.759845 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sxll2"] Sep 29 17:25:23 crc kubenswrapper[4592]: I0929 17:25:23.814412 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b077a14a-4cc2-4812-b675-edd5a40cdcd6-catalog-content\") pod \"community-operators-sxll2\" (UID: \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\") " pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:23 crc kubenswrapper[4592]: I0929 17:25:23.814463 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhslb\" (UniqueName: \"kubernetes.io/projected/b077a14a-4cc2-4812-b675-edd5a40cdcd6-kube-api-access-vhslb\") pod \"community-operators-sxll2\" (UID: \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\") " pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:23 crc kubenswrapper[4592]: I0929 17:25:23.814564 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b077a14a-4cc2-4812-b675-edd5a40cdcd6-utilities\") pod \"community-operators-sxll2\" (UID: \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\") " pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:23 crc kubenswrapper[4592]: I0929 17:25:23.916766 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b077a14a-4cc2-4812-b675-edd5a40cdcd6-catalog-content\") pod \"community-operators-sxll2\" (UID: \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\") " pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:23 crc kubenswrapper[4592]: I0929 17:25:23.916837 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhslb\" (UniqueName: \"kubernetes.io/projected/b077a14a-4cc2-4812-b675-edd5a40cdcd6-kube-api-access-vhslb\") pod \"community-operators-sxll2\" (UID: \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\") " pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:23 crc kubenswrapper[4592]: I0929 17:25:23.916943 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b077a14a-4cc2-4812-b675-edd5a40cdcd6-utilities\") pod \"community-operators-sxll2\" (UID: \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\") " pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:23 crc kubenswrapper[4592]: I0929 17:25:23.917339 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b077a14a-4cc2-4812-b675-edd5a40cdcd6-catalog-content\") pod \"community-operators-sxll2\" (UID: \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\") " pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:23 crc kubenswrapper[4592]: I0929 17:25:23.917383 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b077a14a-4cc2-4812-b675-edd5a40cdcd6-utilities\") pod \"community-operators-sxll2\" (UID: \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\") " pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:23 crc kubenswrapper[4592]: I0929 17:25:23.939992 4592 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vhslb\" (UniqueName: \"kubernetes.io/projected/b077a14a-4cc2-4812-b675-edd5a40cdcd6-kube-api-access-vhslb\") pod \"community-operators-sxll2\" (UID: \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\") " pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:24 crc kubenswrapper[4592]: I0929 17:25:24.065202 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:24 crc kubenswrapper[4592]: I0929 17:25:24.679975 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-sxll2"] Sep 29 17:25:24 crc kubenswrapper[4592]: I0929 17:25:24.740374 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxll2" event={"ID":"b077a14a-4cc2-4812-b675-edd5a40cdcd6","Type":"ContainerStarted","Data":"66fdc01aa2869bf97a09e193c5618e077c66bc589ba5ca158b0c86c77c510bd2"} Sep 29 17:25:25 crc kubenswrapper[4592]: I0929 17:25:25.749830 4592 generic.go:334] "Generic (PLEG): container finished" podID="b077a14a-4cc2-4812-b675-edd5a40cdcd6" containerID="8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2" exitCode=0 Sep 29 17:25:25 crc kubenswrapper[4592]: I0929 17:25:25.749887 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxll2" event={"ID":"b077a14a-4cc2-4812-b675-edd5a40cdcd6","Type":"ContainerDied","Data":"8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2"} Sep 29 17:25:27 crc kubenswrapper[4592]: I0929 17:25:27.770081 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxll2" event={"ID":"b077a14a-4cc2-4812-b675-edd5a40cdcd6","Type":"ContainerStarted","Data":"9a5a008884bdcee2964a3f657f0e69c6648f6909e9f85fcc835b99001d12663d"} Sep 29 17:25:28 crc kubenswrapper[4592]: I0929 17:25:28.780698 4592 generic.go:334] "Generic (PLEG): container finished" podID="b077a14a-4cc2-4812-b675-edd5a40cdcd6" containerID="9a5a008884bdcee2964a3f657f0e69c6648f6909e9f85fcc835b99001d12663d" exitCode=0 Sep 29 17:25:28 crc kubenswrapper[4592]: I0929 17:25:28.780915 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxll2" event={"ID":"b077a14a-4cc2-4812-b675-edd5a40cdcd6","Type":"ContainerDied","Data":"9a5a008884bdcee2964a3f657f0e69c6648f6909e9f85fcc835b99001d12663d"} Sep 29 17:25:29 crc kubenswrapper[4592]: I0929 17:25:29.791621 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxll2" event={"ID":"b077a14a-4cc2-4812-b675-edd5a40cdcd6","Type":"ContainerStarted","Data":"d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3"} Sep 29 17:25:29 crc kubenswrapper[4592]: I0929 17:25:29.824660 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-sxll2" podStartSLOduration=3.296441349 podStartE2EDuration="6.824634663s" podCreationTimestamp="2025-09-29 17:25:23 +0000 UTC" firstStartedPulling="2025-09-29 17:25:25.752867048 +0000 UTC m=+2055.900644729" lastFinishedPulling="2025-09-29 17:25:29.281060362 +0000 UTC m=+2059.428838043" observedRunningTime="2025-09-29 17:25:29.812643718 +0000 UTC m=+2059.960421419" watchObservedRunningTime="2025-09-29 17:25:29.824634663 +0000 UTC m=+2059.972412364" Sep 29 17:25:30 crc kubenswrapper[4592]: I0929 17:25:30.802240 4592 generic.go:334] "Generic (PLEG): container finished" 
podID="3b5035f5-4d62-4661-8067-869b1e54997e" containerID="c9052a10a186f3bf20e93daf7ec56288ebfd78efe83c07077765105a6a27d8b9" exitCode=0 Sep 29 17:25:30 crc kubenswrapper[4592]: I0929 17:25:30.802310 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" event={"ID":"3b5035f5-4d62-4661-8067-869b1e54997e","Type":"ContainerDied","Data":"c9052a10a186f3bf20e93daf7ec56288ebfd78efe83c07077765105a6a27d8b9"} Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.257215 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308038 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308166 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-ovn-combined-ca-bundle\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308201 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wz89\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-kube-api-access-5wz89\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308252 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-nova-combined-ca-bundle\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308287 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-telemetry-combined-ca-bundle\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308325 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-neutron-metadata-combined-ca-bundle\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308443 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308507 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-bootstrap-combined-ca-bundle\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308527 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-ssh-key\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308577 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308609 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-repo-setup-combined-ca-bundle\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308643 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-inventory\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308669 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-ovn-default-certs-0\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.308705 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-libvirt-combined-ca-bundle\") pod \"3b5035f5-4d62-4661-8067-869b1e54997e\" (UID: \"3b5035f5-4d62-4661-8067-869b1e54997e\") " Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.314413 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.314682 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.315508 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.316212 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.316987 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-kube-api-access-5wz89" (OuterVolumeSpecName: "kube-api-access-5wz89") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "kube-api-access-5wz89". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.318020 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.319040 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.320860 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.321900 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.321921 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.322987 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.330264 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.346072 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-inventory" (OuterVolumeSpecName: "inventory") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.353904 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3b5035f5-4d62-4661-8067-869b1e54997e" (UID: "3b5035f5-4d62-4661-8067-869b1e54997e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410232 4592 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410272 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410292 4592 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410312 4592 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410329 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410346 4592 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410363 4592 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410380 4592 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410397 4592 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410414 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wz89\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-kube-api-access-5wz89\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410431 4592 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410448 4592 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 
Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410464 4592 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b5035f5-4d62-4661-8067-869b1e54997e-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.410483 4592 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3b5035f5-4d62-4661-8067-869b1e54997e-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\""
Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.821475 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p" event={"ID":"3b5035f5-4d62-4661-8067-869b1e54997e","Type":"ContainerDied","Data":"665b17b2476f8bdd3fbdd42b2ca17cfa035e693e1fce152e16beaa483f6b2400"}
Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.821531 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="665b17b2476f8bdd3fbdd42b2ca17cfa035e693e1fce152e16beaa483f6b2400"
Sep 29 17:25:32 crc kubenswrapper[4592]: I0929 17:25:32.821536 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.008127 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh"]
Sep 29 17:25:33 crc kubenswrapper[4592]: E0929 17:25:33.008871 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b5035f5-4d62-4661-8067-869b1e54997e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.008898 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b5035f5-4d62-4661-8067-869b1e54997e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.009177 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b5035f5-4d62-4661-8067-869b1e54997e" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.010005 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.011899 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.011958 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.012662 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.016850 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh"]
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.019934 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.020263 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.020924 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.020976 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/121a0489-01a2-492b-a564-2718b687e621-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.021030 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s67fq\" (UniqueName: \"kubernetes.io/projected/121a0489-01a2-492b-a564-2718b687e621-kube-api-access-s67fq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.021207 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.021266 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh"
Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.122616 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh"
\"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.122756 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.122810 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/121a0489-01a2-492b-a564-2718b687e621-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.122846 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s67fq\" (UniqueName: \"kubernetes.io/projected/121a0489-01a2-492b-a564-2718b687e621-kube-api-access-s67fq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.123003 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.123875 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/121a0489-01a2-492b-a564-2718b687e621-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.129036 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.129506 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.132175 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") 
" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.149504 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s67fq\" (UniqueName: \"kubernetes.io/projected/121a0489-01a2-492b-a564-2718b687e621-kube-api-access-s67fq\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-qdjjh\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.327847 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:25:33 crc kubenswrapper[4592]: I0929 17:25:33.892334 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh"] Sep 29 17:25:34 crc kubenswrapper[4592]: I0929 17:25:34.065356 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:34 crc kubenswrapper[4592]: I0929 17:25:34.066327 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:34 crc kubenswrapper[4592]: I0929 17:25:34.116938 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:34 crc kubenswrapper[4592]: I0929 17:25:34.840127 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" event={"ID":"121a0489-01a2-492b-a564-2718b687e621","Type":"ContainerStarted","Data":"e63b7cdc940debe464e30861eed6bb82cd9e1d5ae190d839af36708b308ebc1d"} Sep 29 17:25:34 crc kubenswrapper[4592]: I0929 17:25:34.840413 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" event={"ID":"121a0489-01a2-492b-a564-2718b687e621","Type":"ContainerStarted","Data":"72bc951c2270929c70a10b9a5cbd164e7aa9646924d111888ed92e7591630e93"} Sep 29 17:25:34 crc kubenswrapper[4592]: I0929 17:25:34.859662 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" podStartSLOduration=2.348447063 podStartE2EDuration="2.859645774s" podCreationTimestamp="2025-09-29 17:25:32 +0000 UTC" firstStartedPulling="2025-09-29 17:25:33.910763649 +0000 UTC m=+2064.058541340" lastFinishedPulling="2025-09-29 17:25:34.42196236 +0000 UTC m=+2064.569740051" observedRunningTime="2025-09-29 17:25:34.858185544 +0000 UTC m=+2065.005963225" watchObservedRunningTime="2025-09-29 17:25:34.859645774 +0000 UTC m=+2065.007423455" Sep 29 17:25:34 crc kubenswrapper[4592]: I0929 17:25:34.887738 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-sxll2" Sep 29 17:25:34 crc kubenswrapper[4592]: I0929 17:25:34.936421 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sxll2"] Sep 29 17:25:36 crc kubenswrapper[4592]: I0929 17:25:36.859029 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-sxll2" podUID="b077a14a-4cc2-4812-b675-edd5a40cdcd6" containerName="registry-server" containerID="cri-o://d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3" gracePeriod=2 Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.306722 4592 
Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.345330 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b077a14a-4cc2-4812-b675-edd5a40cdcd6-catalog-content\") pod \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\" (UID: \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\") "
Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.345468 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b077a14a-4cc2-4812-b675-edd5a40cdcd6-utilities\") pod \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\" (UID: \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\") "
Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.345517 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhslb\" (UniqueName: \"kubernetes.io/projected/b077a14a-4cc2-4812-b675-edd5a40cdcd6-kube-api-access-vhslb\") pod \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\" (UID: \"b077a14a-4cc2-4812-b675-edd5a40cdcd6\") "
Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.346450 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b077a14a-4cc2-4812-b675-edd5a40cdcd6-utilities" (OuterVolumeSpecName: "utilities") pod "b077a14a-4cc2-4812-b675-edd5a40cdcd6" (UID: "b077a14a-4cc2-4812-b675-edd5a40cdcd6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.359125 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b077a14a-4cc2-4812-b675-edd5a40cdcd6-kube-api-access-vhslb" (OuterVolumeSpecName: "kube-api-access-vhslb") pod "b077a14a-4cc2-4812-b675-edd5a40cdcd6" (UID: "b077a14a-4cc2-4812-b675-edd5a40cdcd6"). InnerVolumeSpecName "kube-api-access-vhslb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.393541 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b077a14a-4cc2-4812-b675-edd5a40cdcd6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b077a14a-4cc2-4812-b675-edd5a40cdcd6" (UID: "b077a14a-4cc2-4812-b675-edd5a40cdcd6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.446931 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b077a14a-4cc2-4812-b675-edd5a40cdcd6-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.446967 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhslb\" (UniqueName: \"kubernetes.io/projected/b077a14a-4cc2-4812-b675-edd5a40cdcd6-kube-api-access-vhslb\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.446978 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b077a14a-4cc2-4812-b675-edd5a40cdcd6-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.872031 4592 generic.go:334] "Generic (PLEG): container finished" podID="b077a14a-4cc2-4812-b675-edd5a40cdcd6" containerID="d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3" exitCode=0 Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.872089 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxll2" event={"ID":"b077a14a-4cc2-4812-b675-edd5a40cdcd6","Type":"ContainerDied","Data":"d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3"} Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.872126 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-sxll2" event={"ID":"b077a14a-4cc2-4812-b675-edd5a40cdcd6","Type":"ContainerDied","Data":"66fdc01aa2869bf97a09e193c5618e077c66bc589ba5ca158b0c86c77c510bd2"} Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.872186 4592 scope.go:117] "RemoveContainer" containerID="d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3" Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.872278 4592 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.926425 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-sxll2"]
Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.933586 4592 scope.go:117] "RemoveContainer" containerID="9a5a008884bdcee2964a3f657f0e69c6648f6909e9f85fcc835b99001d12663d"
Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.936227 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-sxll2"]
Sep 29 17:25:37 crc kubenswrapper[4592]: I0929 17:25:37.962472 4592 scope.go:117] "RemoveContainer" containerID="8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2"
Sep 29 17:25:38 crc kubenswrapper[4592]: I0929 17:25:38.000599 4592 scope.go:117] "RemoveContainer" containerID="d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3"
Sep 29 17:25:38 crc kubenswrapper[4592]: E0929 17:25:38.001005 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3\": container with ID starting with d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3 not found: ID does not exist" containerID="d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3"
Sep 29 17:25:38 crc kubenswrapper[4592]: I0929 17:25:38.001041 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3"} err="failed to get container status \"d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3\": rpc error: code = NotFound desc = could not find container \"d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3\": container with ID starting with d0493dadeea5d6acae8c678e33c22357492fa38108e2bbc9b9c49031f913b4f3 not found: ID does not exist"
Sep 29 17:25:38 crc kubenswrapper[4592]: I0929 17:25:38.001067 4592 scope.go:117] "RemoveContainer" containerID="9a5a008884bdcee2964a3f657f0e69c6648f6909e9f85fcc835b99001d12663d"
Sep 29 17:25:38 crc kubenswrapper[4592]: E0929 17:25:38.001425 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a5a008884bdcee2964a3f657f0e69c6648f6909e9f85fcc835b99001d12663d\": container with ID starting with 9a5a008884bdcee2964a3f657f0e69c6648f6909e9f85fcc835b99001d12663d not found: ID does not exist" containerID="9a5a008884bdcee2964a3f657f0e69c6648f6909e9f85fcc835b99001d12663d"
Sep 29 17:25:38 crc kubenswrapper[4592]: I0929 17:25:38.001449 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a5a008884bdcee2964a3f657f0e69c6648f6909e9f85fcc835b99001d12663d"} err="failed to get container status \"9a5a008884bdcee2964a3f657f0e69c6648f6909e9f85fcc835b99001d12663d\": rpc error: code = NotFound desc = could not find container \"9a5a008884bdcee2964a3f657f0e69c6648f6909e9f85fcc835b99001d12663d\": container with ID starting with 9a5a008884bdcee2964a3f657f0e69c6648f6909e9f85fcc835b99001d12663d not found: ID does not exist"
Sep 29 17:25:38 crc kubenswrapper[4592]: I0929 17:25:38.001467 4592 scope.go:117] "RemoveContainer" containerID="8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2"
Sep 29 17:25:38 crc kubenswrapper[4592]: E0929 17:25:38.001766 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2\": container with ID starting with 8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2 not found: ID does not exist" containerID="8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2"
failed" err="rpc error: code = NotFound desc = could not find container \"8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2\": container with ID starting with 8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2 not found: ID does not exist" containerID="8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2" Sep 29 17:25:38 crc kubenswrapper[4592]: I0929 17:25:38.001797 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2"} err="failed to get container status \"8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2\": rpc error: code = NotFound desc = could not find container \"8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2\": container with ID starting with 8949c03987e316e69dd7dae937c613f2a1b97b68302c3adf5e132e733359caf2 not found: ID does not exist" Sep 29 17:25:39 crc kubenswrapper[4592]: I0929 17:25:39.197237 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b077a14a-4cc2-4812-b675-edd5a40cdcd6" path="/var/lib/kubelet/pods/b077a14a-4cc2-4812-b675-edd5a40cdcd6/volumes" Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.563600 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vt4sw"] Sep 29 17:26:15 crc kubenswrapper[4592]: E0929 17:26:15.566312 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b077a14a-4cc2-4812-b675-edd5a40cdcd6" containerName="extract-content" Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.566436 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b077a14a-4cc2-4812-b675-edd5a40cdcd6" containerName="extract-content" Sep 29 17:26:15 crc kubenswrapper[4592]: E0929 17:26:15.566542 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b077a14a-4cc2-4812-b675-edd5a40cdcd6" containerName="registry-server" Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.566627 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b077a14a-4cc2-4812-b675-edd5a40cdcd6" containerName="registry-server" Sep 29 17:26:15 crc kubenswrapper[4592]: E0929 17:26:15.566734 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b077a14a-4cc2-4812-b675-edd5a40cdcd6" containerName="extract-utilities" Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.566821 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b077a14a-4cc2-4812-b675-edd5a40cdcd6" containerName="extract-utilities" Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.567125 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b077a14a-4cc2-4812-b675-edd5a40cdcd6" containerName="registry-server" Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.568809 4592 util.go:30] "No sandbox for pod can be found. 
Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.587323 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vt4sw"]
Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.726328 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac720be2-a313-400e-805b-05228f1fd9d5-catalog-content\") pod \"redhat-marketplace-vt4sw\" (UID: \"ac720be2-a313-400e-805b-05228f1fd9d5\") " pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.726625 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74g9k\" (UniqueName: \"kubernetes.io/projected/ac720be2-a313-400e-805b-05228f1fd9d5-kube-api-access-74g9k\") pod \"redhat-marketplace-vt4sw\" (UID: \"ac720be2-a313-400e-805b-05228f1fd9d5\") " pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.726786 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac720be2-a313-400e-805b-05228f1fd9d5-utilities\") pod \"redhat-marketplace-vt4sw\" (UID: \"ac720be2-a313-400e-805b-05228f1fd9d5\") " pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.828598 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac720be2-a313-400e-805b-05228f1fd9d5-catalog-content\") pod \"redhat-marketplace-vt4sw\" (UID: \"ac720be2-a313-400e-805b-05228f1fd9d5\") " pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.828692 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74g9k\" (UniqueName: \"kubernetes.io/projected/ac720be2-a313-400e-805b-05228f1fd9d5-kube-api-access-74g9k\") pod \"redhat-marketplace-vt4sw\" (UID: \"ac720be2-a313-400e-805b-05228f1fd9d5\") " pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.828729 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac720be2-a313-400e-805b-05228f1fd9d5-utilities\") pod \"redhat-marketplace-vt4sw\" (UID: \"ac720be2-a313-400e-805b-05228f1fd9d5\") " pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.828987 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac720be2-a313-400e-805b-05228f1fd9d5-catalog-content\") pod \"redhat-marketplace-vt4sw\" (UID: \"ac720be2-a313-400e-805b-05228f1fd9d5\") " pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.829112 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac720be2-a313-400e-805b-05228f1fd9d5-utilities\") pod \"redhat-marketplace-vt4sw\" (UID: \"ac720be2-a313-400e-805b-05228f1fd9d5\") " pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.855793 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74g9k\" (UniqueName: \"kubernetes.io/projected/ac720be2-a313-400e-805b-05228f1fd9d5-kube-api-access-74g9k\") pod \"redhat-marketplace-vt4sw\" (UID: \"ac720be2-a313-400e-805b-05228f1fd9d5\") " pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:15 crc kubenswrapper[4592]: I0929 17:26:15.897501 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:16 crc kubenswrapper[4592]: I0929 17:26:16.360108 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vt4sw"]
Sep 29 17:26:17 crc kubenswrapper[4592]: I0929 17:26:17.271058 4592 generic.go:334] "Generic (PLEG): container finished" podID="ac720be2-a313-400e-805b-05228f1fd9d5" containerID="ef1860c2afca99177cf31685d78493e1a82b3692632cfa0eb398055e710299c1" exitCode=0
Sep 29 17:26:17 crc kubenswrapper[4592]: I0929 17:26:17.271361 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vt4sw" event={"ID":"ac720be2-a313-400e-805b-05228f1fd9d5","Type":"ContainerDied","Data":"ef1860c2afca99177cf31685d78493e1a82b3692632cfa0eb398055e710299c1"}
Sep 29 17:26:17 crc kubenswrapper[4592]: I0929 17:26:17.271383 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vt4sw" event={"ID":"ac720be2-a313-400e-805b-05228f1fd9d5","Type":"ContainerStarted","Data":"646ee595a94ed7f3899d6e9fff498fa83526c426a84759958687968f3549e169"}
Sep 29 17:26:17 crc kubenswrapper[4592]: I0929 17:26:17.960961 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zsgql"]
Sep 29 17:26:17 crc kubenswrapper[4592]: I0929 17:26:17.965588 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zsgql"
Sep 29 17:26:17 crc kubenswrapper[4592]: I0929 17:26:17.982859 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zsgql"]
Sep 29 17:26:18 crc kubenswrapper[4592]: I0929 17:26:18.076498 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptlzt\" (UniqueName: \"kubernetes.io/projected/2ef352b6-d3d1-44ff-8998-54973c5f9414-kube-api-access-ptlzt\") pod \"redhat-operators-zsgql\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") " pod="openshift-marketplace/redhat-operators-zsgql"
Sep 29 17:26:18 crc kubenswrapper[4592]: I0929 17:26:18.076583 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef352b6-d3d1-44ff-8998-54973c5f9414-catalog-content\") pod \"redhat-operators-zsgql\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") " pod="openshift-marketplace/redhat-operators-zsgql"
Sep 29 17:26:18 crc kubenswrapper[4592]: I0929 17:26:18.076622 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef352b6-d3d1-44ff-8998-54973c5f9414-utilities\") pod \"redhat-operators-zsgql\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") " pod="openshift-marketplace/redhat-operators-zsgql"
Sep 29 17:26:18 crc kubenswrapper[4592]: I0929 17:26:18.178645 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef352b6-d3d1-44ff-8998-54973c5f9414-utilities\") pod \"redhat-operators-zsgql\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") " pod="openshift-marketplace/redhat-operators-zsgql"
Sep 29 17:26:18 crc kubenswrapper[4592]: I0929 17:26:18.178819 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptlzt\" (UniqueName: \"kubernetes.io/projected/2ef352b6-d3d1-44ff-8998-54973c5f9414-kube-api-access-ptlzt\") pod \"redhat-operators-zsgql\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") " pod="openshift-marketplace/redhat-operators-zsgql"
Sep 29 17:26:18 crc kubenswrapper[4592]: I0929 17:26:18.178894 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef352b6-d3d1-44ff-8998-54973c5f9414-catalog-content\") pod \"redhat-operators-zsgql\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") " pod="openshift-marketplace/redhat-operators-zsgql"
Sep 29 17:26:18 crc kubenswrapper[4592]: I0929 17:26:18.179440 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef352b6-d3d1-44ff-8998-54973c5f9414-catalog-content\") pod \"redhat-operators-zsgql\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") " pod="openshift-marketplace/redhat-operators-zsgql"
Sep 29 17:26:18 crc kubenswrapper[4592]: I0929 17:26:18.179441 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef352b6-d3d1-44ff-8998-54973c5f9414-utilities\") pod \"redhat-operators-zsgql\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") " pod="openshift-marketplace/redhat-operators-zsgql"
Sep 29 17:26:18 crc kubenswrapper[4592]: I0929 17:26:18.203261 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptlzt\" (UniqueName: \"kubernetes.io/projected/2ef352b6-d3d1-44ff-8998-54973c5f9414-kube-api-access-ptlzt\") pod \"redhat-operators-zsgql\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") " pod="openshift-marketplace/redhat-operators-zsgql"
\"kube-api-access-ptlzt\" (UniqueName: \"kubernetes.io/projected/2ef352b6-d3d1-44ff-8998-54973c5f9414-kube-api-access-ptlzt\") pod \"redhat-operators-zsgql\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") " pod="openshift-marketplace/redhat-operators-zsgql" Sep 29 17:26:18 crc kubenswrapper[4592]: I0929 17:26:18.309002 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zsgql" Sep 29 17:26:18 crc kubenswrapper[4592]: I0929 17:26:18.657100 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zsgql"] Sep 29 17:26:19 crc kubenswrapper[4592]: I0929 17:26:19.300999 4592 generic.go:334] "Generic (PLEG): container finished" podID="2ef352b6-d3d1-44ff-8998-54973c5f9414" containerID="6827892758a2b5d1e5f9a9143b5ac7cf8e10f3cde7be4a8408b378b17a39dab2" exitCode=0 Sep 29 17:26:19 crc kubenswrapper[4592]: I0929 17:26:19.301039 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zsgql" event={"ID":"2ef352b6-d3d1-44ff-8998-54973c5f9414","Type":"ContainerDied","Data":"6827892758a2b5d1e5f9a9143b5ac7cf8e10f3cde7be4a8408b378b17a39dab2"} Sep 29 17:26:19 crc kubenswrapper[4592]: I0929 17:26:19.301444 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zsgql" event={"ID":"2ef352b6-d3d1-44ff-8998-54973c5f9414","Type":"ContainerStarted","Data":"0ed93c96a20cbfca178fc2cc9474289feb99bfa030bf24e8ea2ba7c7a29b20d0"} Sep 29 17:26:19 crc kubenswrapper[4592]: I0929 17:26:19.304069 4592 generic.go:334] "Generic (PLEG): container finished" podID="ac720be2-a313-400e-805b-05228f1fd9d5" containerID="33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8" exitCode=0 Sep 29 17:26:19 crc kubenswrapper[4592]: I0929 17:26:19.304100 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vt4sw" event={"ID":"ac720be2-a313-400e-805b-05228f1fd9d5","Type":"ContainerDied","Data":"33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8"} Sep 29 17:26:20 crc kubenswrapper[4592]: I0929 17:26:20.313439 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zsgql" event={"ID":"2ef352b6-d3d1-44ff-8998-54973c5f9414","Type":"ContainerStarted","Data":"d9202b5e923ced42451cd4f764d889f08ce7009f524888014fca0565449f16f3"} Sep 29 17:26:20 crc kubenswrapper[4592]: I0929 17:26:20.318641 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vt4sw" event={"ID":"ac720be2-a313-400e-805b-05228f1fd9d5","Type":"ContainerStarted","Data":"4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019"} Sep 29 17:26:20 crc kubenswrapper[4592]: I0929 17:26:20.358438 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vt4sw" podStartSLOduration=2.920079356 podStartE2EDuration="5.358417335s" podCreationTimestamp="2025-09-29 17:26:15 +0000 UTC" firstStartedPulling="2025-09-29 17:26:17.272791547 +0000 UTC m=+2107.420569228" lastFinishedPulling="2025-09-29 17:26:19.711129526 +0000 UTC m=+2109.858907207" observedRunningTime="2025-09-29 17:26:20.354275363 +0000 UTC m=+2110.502053054" watchObservedRunningTime="2025-09-29 17:26:20.358417335 +0000 UTC m=+2110.506195016" Sep 29 17:26:25 crc kubenswrapper[4592]: I0929 17:26:25.363466 4592 generic.go:334] "Generic (PLEG): container finished" podID="2ef352b6-d3d1-44ff-8998-54973c5f9414" 
Sep 29 17:26:25 crc kubenswrapper[4592]: I0929 17:26:25.363543 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zsgql" event={"ID":"2ef352b6-d3d1-44ff-8998-54973c5f9414","Type":"ContainerDied","Data":"d9202b5e923ced42451cd4f764d889f08ce7009f524888014fca0565449f16f3"}
Sep 29 17:26:25 crc kubenswrapper[4592]: I0929 17:26:25.897871 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:25 crc kubenswrapper[4592]: I0929 17:26:25.897904 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:25 crc kubenswrapper[4592]: I0929 17:26:25.980196 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:26 crc kubenswrapper[4592]: I0929 17:26:26.374715 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zsgql" event={"ID":"2ef352b6-d3d1-44ff-8998-54973c5f9414","Type":"ContainerStarted","Data":"8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e"}
Sep 29 17:26:26 crc kubenswrapper[4592]: I0929 17:26:26.392041 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zsgql" podStartSLOduration=2.9194414269999998 podStartE2EDuration="9.392024371s" podCreationTimestamp="2025-09-29 17:26:17 +0000 UTC" firstStartedPulling="2025-09-29 17:26:19.303770267 +0000 UTC m=+2109.451547948" lastFinishedPulling="2025-09-29 17:26:25.776353211 +0000 UTC m=+2115.924130892" observedRunningTime="2025-09-29 17:26:26.389534703 +0000 UTC m=+2116.537312384" watchObservedRunningTime="2025-09-29 17:26:26.392024371 +0000 UTC m=+2116.539802042"
Sep 29 17:26:26 crc kubenswrapper[4592]: I0929 17:26:26.434454 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:27 crc kubenswrapper[4592]: I0929 17:26:27.601058 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vt4sw"]
Sep 29 17:26:28 crc kubenswrapper[4592]: I0929 17:26:28.310136 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zsgql"
Sep 29 17:26:28 crc kubenswrapper[4592]: I0929 17:26:28.310490 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zsgql"
Sep 29 17:26:28 crc kubenswrapper[4592]: I0929 17:26:28.391572 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vt4sw" podUID="ac720be2-a313-400e-805b-05228f1fd9d5" containerName="registry-server" containerID="cri-o://4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019" gracePeriod=2
Sep 29 17:26:28 crc kubenswrapper[4592]: I0929 17:26:28.983253 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vt4sw"
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.146279 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac720be2-a313-400e-805b-05228f1fd9d5-catalog-content\") pod \"ac720be2-a313-400e-805b-05228f1fd9d5\" (UID: \"ac720be2-a313-400e-805b-05228f1fd9d5\") "
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.146350 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac720be2-a313-400e-805b-05228f1fd9d5-utilities\") pod \"ac720be2-a313-400e-805b-05228f1fd9d5\" (UID: \"ac720be2-a313-400e-805b-05228f1fd9d5\") "
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.146406 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74g9k\" (UniqueName: \"kubernetes.io/projected/ac720be2-a313-400e-805b-05228f1fd9d5-kube-api-access-74g9k\") pod \"ac720be2-a313-400e-805b-05228f1fd9d5\" (UID: \"ac720be2-a313-400e-805b-05228f1fd9d5\") "
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.149288 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac720be2-a313-400e-805b-05228f1fd9d5-utilities" (OuterVolumeSpecName: "utilities") pod "ac720be2-a313-400e-805b-05228f1fd9d5" (UID: "ac720be2-a313-400e-805b-05228f1fd9d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.154412 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac720be2-a313-400e-805b-05228f1fd9d5-kube-api-access-74g9k" (OuterVolumeSpecName: "kube-api-access-74g9k") pod "ac720be2-a313-400e-805b-05228f1fd9d5" (UID: "ac720be2-a313-400e-805b-05228f1fd9d5"). InnerVolumeSpecName "kube-api-access-74g9k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.172480 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac720be2-a313-400e-805b-05228f1fd9d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ac720be2-a313-400e-805b-05228f1fd9d5" (UID: "ac720be2-a313-400e-805b-05228f1fd9d5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.248721 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ac720be2-a313-400e-805b-05228f1fd9d5-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.248754 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74g9k\" (UniqueName: \"kubernetes.io/projected/ac720be2-a313-400e-805b-05228f1fd9d5-kube-api-access-74g9k\") on node \"crc\" DevicePath \"\"" Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.248768 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ac720be2-a313-400e-805b-05228f1fd9d5-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.355863 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zsgql" podUID="2ef352b6-d3d1-44ff-8998-54973c5f9414" containerName="registry-server" probeResult="failure" output=< Sep 29 17:26:29 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s Sep 29 17:26:29 crc kubenswrapper[4592]: > Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.401669 4592 generic.go:334] "Generic (PLEG): container finished" podID="ac720be2-a313-400e-805b-05228f1fd9d5" containerID="4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019" exitCode=0 Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.401728 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vt4sw" event={"ID":"ac720be2-a313-400e-805b-05228f1fd9d5","Type":"ContainerDied","Data":"4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019"} Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.401769 4592 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.401796 4592 scope.go:117] "RemoveContainer" containerID="4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019"
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.401784 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vt4sw" event={"ID":"ac720be2-a313-400e-805b-05228f1fd9d5","Type":"ContainerDied","Data":"646ee595a94ed7f3899d6e9fff498fa83526c426a84759958687968f3549e169"}
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.426913 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vt4sw"]
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.427248 4592 scope.go:117] "RemoveContainer" containerID="33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8"
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.434832 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vt4sw"]
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.444896 4592 scope.go:117] "RemoveContainer" containerID="ef1860c2afca99177cf31685d78493e1a82b3692632cfa0eb398055e710299c1"
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.499468 4592 scope.go:117] "RemoveContainer" containerID="4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019"
Sep 29 17:26:29 crc kubenswrapper[4592]: E0929 17:26:29.499850 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019\": container with ID starting with 4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019 not found: ID does not exist" containerID="4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019"
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.499892 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019"} err="failed to get container status \"4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019\": rpc error: code = NotFound desc = could not find container \"4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019\": container with ID starting with 4d95de43b50048e98008365f173987c0fe349863614bc5a145fd9f6a835a1019 not found: ID does not exist"
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.499919 4592 scope.go:117] "RemoveContainer" containerID="33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8"
Sep 29 17:26:29 crc kubenswrapper[4592]: E0929 17:26:29.500302 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8\": container with ID starting with 33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8 not found: ID does not exist" containerID="33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8"
Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.500334 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8"} err="failed to get container status \"33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8\": rpc error: code = NotFound desc = could not find container \"33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8\": container with ID starting with 33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8 not found: ID does not exist"
container \"33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8\": container with ID starting with 33100cf6b90858371a9da967c5e21ef3a347bca2f2306992f29c4b5d607a8fd8 not found: ID does not exist" Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.500355 4592 scope.go:117] "RemoveContainer" containerID="ef1860c2afca99177cf31685d78493e1a82b3692632cfa0eb398055e710299c1" Sep 29 17:26:29 crc kubenswrapper[4592]: E0929 17:26:29.500603 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef1860c2afca99177cf31685d78493e1a82b3692632cfa0eb398055e710299c1\": container with ID starting with ef1860c2afca99177cf31685d78493e1a82b3692632cfa0eb398055e710299c1 not found: ID does not exist" containerID="ef1860c2afca99177cf31685d78493e1a82b3692632cfa0eb398055e710299c1" Sep 29 17:26:29 crc kubenswrapper[4592]: I0929 17:26:29.500627 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef1860c2afca99177cf31685d78493e1a82b3692632cfa0eb398055e710299c1"} err="failed to get container status \"ef1860c2afca99177cf31685d78493e1a82b3692632cfa0eb398055e710299c1\": rpc error: code = NotFound desc = could not find container \"ef1860c2afca99177cf31685d78493e1a82b3692632cfa0eb398055e710299c1\": container with ID starting with ef1860c2afca99177cf31685d78493e1a82b3692632cfa0eb398055e710299c1 not found: ID does not exist" Sep 29 17:26:31 crc kubenswrapper[4592]: I0929 17:26:31.195457 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac720be2-a313-400e-805b-05228f1fd9d5" path="/var/lib/kubelet/pods/ac720be2-a313-400e-805b-05228f1fd9d5/volumes" Sep 29 17:26:38 crc kubenswrapper[4592]: I0929 17:26:38.352399 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zsgql" Sep 29 17:26:38 crc kubenswrapper[4592]: I0929 17:26:38.399365 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zsgql" Sep 29 17:26:38 crc kubenswrapper[4592]: I0929 17:26:38.586963 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zsgql"] Sep 29 17:26:39 crc kubenswrapper[4592]: I0929 17:26:39.497193 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zsgql" podUID="2ef352b6-d3d1-44ff-8998-54973c5f9414" containerName="registry-server" containerID="cri-o://8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e" gracePeriod=2 Sep 29 17:26:39 crc kubenswrapper[4592]: I0929 17:26:39.963762 4592 util.go:48] "No ready sandbox for pod can be found. 
Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.052228 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef352b6-d3d1-44ff-8998-54973c5f9414-catalog-content\") pod \"2ef352b6-d3d1-44ff-8998-54973c5f9414\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") "
Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.052288 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptlzt\" (UniqueName: \"kubernetes.io/projected/2ef352b6-d3d1-44ff-8998-54973c5f9414-kube-api-access-ptlzt\") pod \"2ef352b6-d3d1-44ff-8998-54973c5f9414\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") "
Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.052381 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef352b6-d3d1-44ff-8998-54973c5f9414-utilities\") pod \"2ef352b6-d3d1-44ff-8998-54973c5f9414\" (UID: \"2ef352b6-d3d1-44ff-8998-54973c5f9414\") "
Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.053329 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ef352b6-d3d1-44ff-8998-54973c5f9414-utilities" (OuterVolumeSpecName: "utilities") pod "2ef352b6-d3d1-44ff-8998-54973c5f9414" (UID: "2ef352b6-d3d1-44ff-8998-54973c5f9414"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.053872 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ef352b6-d3d1-44ff-8998-54973c5f9414-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.062405 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ef352b6-d3d1-44ff-8998-54973c5f9414-kube-api-access-ptlzt" (OuterVolumeSpecName: "kube-api-access-ptlzt") pod "2ef352b6-d3d1-44ff-8998-54973c5f9414" (UID: "2ef352b6-d3d1-44ff-8998-54973c5f9414"). InnerVolumeSpecName "kube-api-access-ptlzt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.138326 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ef352b6-d3d1-44ff-8998-54973c5f9414-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2ef352b6-d3d1-44ff-8998-54973c5f9414" (UID: "2ef352b6-d3d1-44ff-8998-54973c5f9414"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.156222 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ef352b6-d3d1-44ff-8998-54973c5f9414-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.156265 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptlzt\" (UniqueName: \"kubernetes.io/projected/2ef352b6-d3d1-44ff-8998-54973c5f9414-kube-api-access-ptlzt\") on node \"crc\" DevicePath \"\"" Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.513276 4592 generic.go:334] "Generic (PLEG): container finished" podID="2ef352b6-d3d1-44ff-8998-54973c5f9414" containerID="8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e" exitCode=0 Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.513319 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zsgql" Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.513345 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zsgql" event={"ID":"2ef352b6-d3d1-44ff-8998-54973c5f9414","Type":"ContainerDied","Data":"8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e"} Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.515649 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zsgql" event={"ID":"2ef352b6-d3d1-44ff-8998-54973c5f9414","Type":"ContainerDied","Data":"0ed93c96a20cbfca178fc2cc9474289feb99bfa030bf24e8ea2ba7c7a29b20d0"} Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.515675 4592 scope.go:117] "RemoveContainer" containerID="8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e" Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.547853 4592 scope.go:117] "RemoveContainer" containerID="d9202b5e923ced42451cd4f764d889f08ce7009f524888014fca0565449f16f3" Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.562895 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zsgql"] Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.571006 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zsgql"] Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.591676 4592 scope.go:117] "RemoveContainer" containerID="6827892758a2b5d1e5f9a9143b5ac7cf8e10f3cde7be4a8408b378b17a39dab2" Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.625755 4592 scope.go:117] "RemoveContainer" containerID="8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e" Sep 29 17:26:40 crc kubenswrapper[4592]: E0929 17:26:40.626110 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e\": container with ID starting with 8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e not found: ID does not exist" containerID="8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e" Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.626153 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e"} err="failed to get container status \"8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e\": 
rpc error: code = NotFound desc = could not find container \"8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e\": container with ID starting with 8c7350a84d43f241396171f292a9a67fedf99415add9436ff7ea35497f057c6e not found: ID does not exist" Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.626173 4592 scope.go:117] "RemoveContainer" containerID="d9202b5e923ced42451cd4f764d889f08ce7009f524888014fca0565449f16f3" Sep 29 17:26:40 crc kubenswrapper[4592]: E0929 17:26:40.626353 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9202b5e923ced42451cd4f764d889f08ce7009f524888014fca0565449f16f3\": container with ID starting with d9202b5e923ced42451cd4f764d889f08ce7009f524888014fca0565449f16f3 not found: ID does not exist" containerID="d9202b5e923ced42451cd4f764d889f08ce7009f524888014fca0565449f16f3" Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.626368 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9202b5e923ced42451cd4f764d889f08ce7009f524888014fca0565449f16f3"} err="failed to get container status \"d9202b5e923ced42451cd4f764d889f08ce7009f524888014fca0565449f16f3\": rpc error: code = NotFound desc = could not find container \"d9202b5e923ced42451cd4f764d889f08ce7009f524888014fca0565449f16f3\": container with ID starting with d9202b5e923ced42451cd4f764d889f08ce7009f524888014fca0565449f16f3 not found: ID does not exist" Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.626380 4592 scope.go:117] "RemoveContainer" containerID="6827892758a2b5d1e5f9a9143b5ac7cf8e10f3cde7be4a8408b378b17a39dab2" Sep 29 17:26:40 crc kubenswrapper[4592]: E0929 17:26:40.626518 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6827892758a2b5d1e5f9a9143b5ac7cf8e10f3cde7be4a8408b378b17a39dab2\": container with ID starting with 6827892758a2b5d1e5f9a9143b5ac7cf8e10f3cde7be4a8408b378b17a39dab2 not found: ID does not exist" containerID="6827892758a2b5d1e5f9a9143b5ac7cf8e10f3cde7be4a8408b378b17a39dab2" Sep 29 17:26:40 crc kubenswrapper[4592]: I0929 17:26:40.626539 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6827892758a2b5d1e5f9a9143b5ac7cf8e10f3cde7be4a8408b378b17a39dab2"} err="failed to get container status \"6827892758a2b5d1e5f9a9143b5ac7cf8e10f3cde7be4a8408b378b17a39dab2\": rpc error: code = NotFound desc = could not find container \"6827892758a2b5d1e5f9a9143b5ac7cf8e10f3cde7be4a8408b378b17a39dab2\": container with ID starting with 6827892758a2b5d1e5f9a9143b5ac7cf8e10f3cde7be4a8408b378b17a39dab2 not found: ID does not exist" Sep 29 17:26:41 crc kubenswrapper[4592]: I0929 17:26:41.208366 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ef352b6-d3d1-44ff-8998-54973c5f9414" path="/var/lib/kubelet/pods/2ef352b6-d3d1-44ff-8998-54973c5f9414/volumes" Sep 29 17:26:41 crc kubenswrapper[4592]: I0929 17:26:41.536908 4592 generic.go:334] "Generic (PLEG): container finished" podID="121a0489-01a2-492b-a564-2718b687e621" containerID="e63b7cdc940debe464e30861eed6bb82cd9e1d5ae190d839af36708b308ebc1d" exitCode=0 Sep 29 17:26:41 crc kubenswrapper[4592]: I0929 17:26:41.537069 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" 
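The RemoveContainer / "ContainerStatus from runtime service failed" / "DeleteContainer returned error" triples above show the kubelet retrying deletion of containers that CRI-O has already garbage-collected after the pod was removed from the API server; the runtime answers NotFound, which is benign here. A minimal Go sketch of treating such a NotFound as an idempotent delete (the runtimeService interface and names are hypothetical; only the gRPC status handling mirrors real libraries):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// runtimeService stands in for the CRI runtime client (hypothetical interface).
type runtimeService interface {
	RemoveContainer(id string) error
}

// removeIdempotent treats NotFound as "already removed": the PLEG relist and
// the API-driven delete race, so a container may vanish between the two paths.
func removeIdempotent(rt runtimeService, id string) error {
	if err := rt.RemoveContainer(id); err != nil {
		if status.Code(err) == codes.NotFound {
			return nil // container already gone; nothing left to delete
		}
		return fmt.Errorf("remove container %s: %w", id, err)
	}
	return nil
}

type fakeRuntime struct{}

func (fakeRuntime) RemoveContainer(id string) error {
	return status.Error(codes.NotFound, "could not find container "+id)
}

func main() {
	// Mirrors the log: the second delete attempt finds nothing and still succeeds.
	fmt.Println(removeIdempotent(fakeRuntime{}, "8c7350a8")) // <nil>
}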
event={"ID":"121a0489-01a2-492b-a564-2718b687e621","Type":"ContainerDied","Data":"e63b7cdc940debe464e30861eed6bb82cd9e1d5ae190d839af36708b308ebc1d"} Sep 29 17:26:42 crc kubenswrapper[4592]: I0929 17:26:42.986111 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.115190 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ssh-key\") pod \"121a0489-01a2-492b-a564-2718b687e621\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.115287 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ovn-combined-ca-bundle\") pod \"121a0489-01a2-492b-a564-2718b687e621\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.115355 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-inventory\") pod \"121a0489-01a2-492b-a564-2718b687e621\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.115375 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s67fq\" (UniqueName: \"kubernetes.io/projected/121a0489-01a2-492b-a564-2718b687e621-kube-api-access-s67fq\") pod \"121a0489-01a2-492b-a564-2718b687e621\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.115797 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/121a0489-01a2-492b-a564-2718b687e621-ovncontroller-config-0\") pod \"121a0489-01a2-492b-a564-2718b687e621\" (UID: \"121a0489-01a2-492b-a564-2718b687e621\") " Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.120377 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/121a0489-01a2-492b-a564-2718b687e621-kube-api-access-s67fq" (OuterVolumeSpecName: "kube-api-access-s67fq") pod "121a0489-01a2-492b-a564-2718b687e621" (UID: "121a0489-01a2-492b-a564-2718b687e621"). InnerVolumeSpecName "kube-api-access-s67fq". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.126444 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "121a0489-01a2-492b-a564-2718b687e621" (UID: "121a0489-01a2-492b-a564-2718b687e621"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.139348 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/121a0489-01a2-492b-a564-2718b687e621-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "121a0489-01a2-492b-a564-2718b687e621" (UID: "121a0489-01a2-492b-a564-2718b687e621"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.144703 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "121a0489-01a2-492b-a564-2718b687e621" (UID: "121a0489-01a2-492b-a564-2718b687e621"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.144998 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-inventory" (OuterVolumeSpecName: "inventory") pod "121a0489-01a2-492b-a564-2718b687e621" (UID: "121a0489-01a2-492b-a564-2718b687e621"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.218304 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.218341 4592 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.218359 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/121a0489-01a2-492b-a564-2718b687e621-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.218371 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s67fq\" (UniqueName: \"kubernetes.io/projected/121a0489-01a2-492b-a564-2718b687e621-kube-api-access-s67fq\") on node \"crc\" DevicePath \"\"" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.218384 4592 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/121a0489-01a2-492b-a564-2718b687e621-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.576007 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" event={"ID":"121a0489-01a2-492b-a564-2718b687e621","Type":"ContainerDied","Data":"72bc951c2270929c70a10b9a5cbd164e7aa9646924d111888ed92e7591630e93"} Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.576047 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72bc951c2270929c70a10b9a5cbd164e7aa9646924d111888ed92e7591630e93" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.576437 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-qdjjh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.673450 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh"] Sep 29 17:26:43 crc kubenswrapper[4592]: E0929 17:26:43.673990 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac720be2-a313-400e-805b-05228f1fd9d5" containerName="extract-utilities" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.674023 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac720be2-a313-400e-805b-05228f1fd9d5" containerName="extract-utilities" Sep 29 17:26:43 crc kubenswrapper[4592]: E0929 17:26:43.674055 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ef352b6-d3d1-44ff-8998-54973c5f9414" containerName="registry-server" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.674067 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef352b6-d3d1-44ff-8998-54973c5f9414" containerName="registry-server" Sep 29 17:26:43 crc kubenswrapper[4592]: E0929 17:26:43.674100 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac720be2-a313-400e-805b-05228f1fd9d5" containerName="registry-server" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.674112 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac720be2-a313-400e-805b-05228f1fd9d5" containerName="registry-server" Sep 29 17:26:43 crc kubenswrapper[4592]: E0929 17:26:43.674136 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac720be2-a313-400e-805b-05228f1fd9d5" containerName="extract-content" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.674178 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac720be2-a313-400e-805b-05228f1fd9d5" containerName="extract-content" Sep 29 17:26:43 crc kubenswrapper[4592]: E0929 17:26:43.674198 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="121a0489-01a2-492b-a564-2718b687e621" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.674211 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="121a0489-01a2-492b-a564-2718b687e621" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 17:26:43 crc kubenswrapper[4592]: E0929 17:26:43.674238 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ef352b6-d3d1-44ff-8998-54973c5f9414" containerName="extract-utilities" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.674251 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef352b6-d3d1-44ff-8998-54973c5f9414" containerName="extract-utilities" Sep 29 17:26:43 crc kubenswrapper[4592]: E0929 17:26:43.674273 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ef352b6-d3d1-44ff-8998-54973c5f9414" containerName="extract-content" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.674284 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef352b6-d3d1-44ff-8998-54973c5f9414" containerName="extract-content" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.674549 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="121a0489-01a2-492b-a564-2718b687e621" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.674597 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ef352b6-d3d1-44ff-8998-54973c5f9414" containerName="registry-server" Sep 29 
17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.674625 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac720be2-a313-400e-805b-05228f1fd9d5" containerName="registry-server" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.675574 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.678442 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.678754 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.679034 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.680967 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.681119 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.681328 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.697830 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh"] Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.834737 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.834790 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.834810 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.835053 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: 
\"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.835422 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22dwf\" (UniqueName: \"kubernetes.io/projected/29930a89-f89e-4db7-85e6-4f47c1033098-kube-api-access-22dwf\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.835476 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.937031 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.937187 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22dwf\" (UniqueName: \"kubernetes.io/projected/29930a89-f89e-4db7-85e6-4f47c1033098-kube-api-access-22dwf\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.937219 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.937251 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.937280 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.937300 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.945040 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.945594 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.948109 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.948447 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.950450 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:43 crc kubenswrapper[4592]: I0929 17:26:43.955316 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22dwf\" (UniqueName: \"kubernetes.io/projected/29930a89-f89e-4db7-85e6-4f47c1033098-kube-api-access-22dwf\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:44 crc kubenswrapper[4592]: I0929 17:26:44.034889 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:26:44 crc kubenswrapper[4592]: I0929 17:26:44.637407 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh"] Sep 29 17:26:45 crc kubenswrapper[4592]: I0929 17:26:45.598596 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" event={"ID":"29930a89-f89e-4db7-85e6-4f47c1033098","Type":"ContainerStarted","Data":"4bf0b414f84412224655c669c412048793b70559b803cc185e5cd71840f9156b"} Sep 29 17:26:45 crc kubenswrapper[4592]: I0929 17:26:45.599050 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" event={"ID":"29930a89-f89e-4db7-85e6-4f47c1033098","Type":"ContainerStarted","Data":"177777a446a97a75c02654b5c01679077c9c1f7af4c6695b3e92ae3509632e27"} Sep 29 17:26:45 crc kubenswrapper[4592]: I0929 17:26:45.614902 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" podStartSLOduration=2.121581805 podStartE2EDuration="2.614888093s" podCreationTimestamp="2025-09-29 17:26:43 +0000 UTC" firstStartedPulling="2025-09-29 17:26:44.652468645 +0000 UTC m=+2134.800246346" lastFinishedPulling="2025-09-29 17:26:45.145774953 +0000 UTC m=+2135.293552634" observedRunningTime="2025-09-29 17:26:45.614136693 +0000 UTC m=+2135.761914374" watchObservedRunningTime="2025-09-29 17:26:45.614888093 +0000 UTC m=+2135.762665774" Sep 29 17:27:00 crc kubenswrapper[4592]: I0929 17:27:00.883350 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:27:00 crc kubenswrapper[4592]: I0929 17:27:00.883973 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:27:30 crc kubenswrapper[4592]: I0929 17:27:30.884025 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:27:30 crc kubenswrapper[4592]: I0929 17:27:30.885083 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:27:37 crc kubenswrapper[4592]: I0929 17:27:37.053031 4592 generic.go:334] "Generic (PLEG): container finished" podID="29930a89-f89e-4db7-85e6-4f47c1033098" containerID="4bf0b414f84412224655c669c412048793b70559b803cc185e5cd71840f9156b" exitCode=0 Sep 29 17:27:37 crc kubenswrapper[4592]: I0929 17:27:37.053174 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" event={"ID":"29930a89-f89e-4db7-85e6-4f47c1033098","Type":"ContainerDied","Data":"4bf0b414f84412224655c669c412048793b70559b803cc185e5cd71840f9156b"} Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.543069 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.703400 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-nova-metadata-neutron-config-0\") pod \"29930a89-f89e-4db7-85e6-4f47c1033098\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.703494 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22dwf\" (UniqueName: \"kubernetes.io/projected/29930a89-f89e-4db7-85e6-4f47c1033098-kube-api-access-22dwf\") pod \"29930a89-f89e-4db7-85e6-4f47c1033098\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.703529 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-neutron-ovn-metadata-agent-neutron-config-0\") pod \"29930a89-f89e-4db7-85e6-4f47c1033098\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.703617 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-neutron-metadata-combined-ca-bundle\") pod \"29930a89-f89e-4db7-85e6-4f47c1033098\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.703694 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-ssh-key\") pod \"29930a89-f89e-4db7-85e6-4f47c1033098\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.703812 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-inventory\") pod \"29930a89-f89e-4db7-85e6-4f47c1033098\" (UID: \"29930a89-f89e-4db7-85e6-4f47c1033098\") " Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.724229 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "29930a89-f89e-4db7-85e6-4f47c1033098" (UID: "29930a89-f89e-4db7-85e6-4f47c1033098"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.725032 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29930a89-f89e-4db7-85e6-4f47c1033098-kube-api-access-22dwf" (OuterVolumeSpecName: "kube-api-access-22dwf") pod "29930a89-f89e-4db7-85e6-4f47c1033098" (UID: "29930a89-f89e-4db7-85e6-4f47c1033098"). 
InnerVolumeSpecName "kube-api-access-22dwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.732449 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-inventory" (OuterVolumeSpecName: "inventory") pod "29930a89-f89e-4db7-85e6-4f47c1033098" (UID: "29930a89-f89e-4db7-85e6-4f47c1033098"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.736516 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "29930a89-f89e-4db7-85e6-4f47c1033098" (UID: "29930a89-f89e-4db7-85e6-4f47c1033098"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.738052 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "29930a89-f89e-4db7-85e6-4f47c1033098" (UID: "29930a89-f89e-4db7-85e6-4f47c1033098"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.756293 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "29930a89-f89e-4db7-85e6-4f47c1033098" (UID: "29930a89-f89e-4db7-85e6-4f47c1033098"). InnerVolumeSpecName "nova-metadata-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.806645 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22dwf\" (UniqueName: \"kubernetes.io/projected/29930a89-f89e-4db7-85e6-4f47c1033098-kube-api-access-22dwf\") on node \"crc\" DevicePath \"\"" Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.806701 4592 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.806719 4592 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.806734 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.806746 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:27:38 crc kubenswrapper[4592]: I0929 17:27:38.806917 4592 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/29930a89-f89e-4db7-85e6-4f47c1033098-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.074608 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" event={"ID":"29930a89-f89e-4db7-85e6-4f47c1033098","Type":"ContainerDied","Data":"177777a446a97a75c02654b5c01679077c9c1f7af4c6695b3e92ae3509632e27"} Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.074674 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="177777a446a97a75c02654b5c01679077c9c1f7af4c6695b3e92ae3509632e27" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.074754 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.196481 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6"] Sep 29 17:27:39 crc kubenswrapper[4592]: E0929 17:27:39.197441 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29930a89-f89e-4db7-85e6-4f47c1033098" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.197544 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="29930a89-f89e-4db7-85e6-4f47c1033098" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.197944 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="29930a89-f89e-4db7-85e6-4f47c1033098" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.198972 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.206405 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.206638 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.206803 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.206981 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.207130 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.217107 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6"] Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.318384 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.318483 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w78zz\" (UniqueName: \"kubernetes.io/projected/40c378c3-0f92-474d-aaed-f3cd105e4714-kube-api-access-w78zz\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.318502 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.318516 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.318599 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.420781 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.420997 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.421054 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w78zz\" (UniqueName: \"kubernetes.io/projected/40c378c3-0f92-474d-aaed-f3cd105e4714-kube-api-access-w78zz\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.421105 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.421316 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.425909 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.425909 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.427782 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.430690 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-libvirt-combined-ca-bundle\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.439164 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w78zz\" (UniqueName: \"kubernetes.io/projected/40c378c3-0f92-474d-aaed-f3cd105e4714-kube-api-access-w78zz\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:39 crc kubenswrapper[4592]: I0929 17:27:39.529922 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" Sep 29 17:27:40 crc kubenswrapper[4592]: I0929 17:27:40.073039 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6"] Sep 29 17:27:41 crc kubenswrapper[4592]: I0929 17:27:41.096976 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" event={"ID":"40c378c3-0f92-474d-aaed-f3cd105e4714","Type":"ContainerStarted","Data":"8d2c0eed184b0f962355a52036a05b2d0c520385390159390a70492133db15cb"} Sep 29 17:27:41 crc kubenswrapper[4592]: I0929 17:27:41.097289 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" event={"ID":"40c378c3-0f92-474d-aaed-f3cd105e4714","Type":"ContainerStarted","Data":"1b25cde6c738940db78492db52efb30e7beb1dbbd149c89ea2178e3736ecad5d"} Sep 29 17:27:41 crc kubenswrapper[4592]: I0929 17:27:41.123588 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" podStartSLOduration=1.530246407 podStartE2EDuration="2.123565954s" podCreationTimestamp="2025-09-29 17:27:39 +0000 UTC" firstStartedPulling="2025-09-29 17:27:40.08164385 +0000 UTC m=+2190.229421531" lastFinishedPulling="2025-09-29 17:27:40.674963397 +0000 UTC m=+2190.822741078" observedRunningTime="2025-09-29 17:27:41.115005113 +0000 UTC m=+2191.262782804" watchObservedRunningTime="2025-09-29 17:27:41.123565954 +0000 UTC m=+2191.271343655" Sep 29 17:28:00 crc kubenswrapper[4592]: I0929 17:28:00.883716 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:28:00 crc kubenswrapper[4592]: I0929 17:28:00.884371 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:28:00 crc kubenswrapper[4592]: I0929 17:28:00.884433 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 17:28:00 crc kubenswrapper[4592]: I0929 17:28:00.885563 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"40b13a263d719db9fdf3ca3ac6947962b18e9912d31484ebaf596f663dccdd74"} 
pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 17:28:00 crc kubenswrapper[4592]: I0929 17:28:00.885664 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://40b13a263d719db9fdf3ca3ac6947962b18e9912d31484ebaf596f663dccdd74" gracePeriod=600 Sep 29 17:28:01 crc kubenswrapper[4592]: I0929 17:28:01.309532 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="40b13a263d719db9fdf3ca3ac6947962b18e9912d31484ebaf596f663dccdd74" exitCode=0 Sep 29 17:28:01 crc kubenswrapper[4592]: I0929 17:28:01.309564 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"40b13a263d719db9fdf3ca3ac6947962b18e9912d31484ebaf596f663dccdd74"} Sep 29 17:28:01 crc kubenswrapper[4592]: I0929 17:28:01.309975 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636"} Sep 29 17:28:01 crc kubenswrapper[4592]: I0929 17:28:01.309996 4592 scope.go:117] "RemoveContainer" containerID="ac5c5cdc9c013a957af51f68934854b32074f03f0c1dca120bdfbe4bec8cc1d5" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.168588 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg"] Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.170735 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.173648 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.173937 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.194441 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg"] Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.239099 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/57cd85f9-6e86-40ae-9a20-7421a2399c6c-secret-volume\") pod \"collect-profiles-29319450-vmkkg\" (UID: \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.239171 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hb7m\" (UniqueName: \"kubernetes.io/projected/57cd85f9-6e86-40ae-9a20-7421a2399c6c-kube-api-access-8hb7m\") pod \"collect-profiles-29319450-vmkkg\" (UID: \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.239229 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57cd85f9-6e86-40ae-9a20-7421a2399c6c-config-volume\") pod \"collect-profiles-29319450-vmkkg\" (UID: \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.341524 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/57cd85f9-6e86-40ae-9a20-7421a2399c6c-secret-volume\") pod \"collect-profiles-29319450-vmkkg\" (UID: \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.341619 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hb7m\" (UniqueName: \"kubernetes.io/projected/57cd85f9-6e86-40ae-9a20-7421a2399c6c-kube-api-access-8hb7m\") pod \"collect-profiles-29319450-vmkkg\" (UID: \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.341751 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57cd85f9-6e86-40ae-9a20-7421a2399c6c-config-volume\") pod \"collect-profiles-29319450-vmkkg\" (UID: \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.343644 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57cd85f9-6e86-40ae-9a20-7421a2399c6c-config-volume\") pod 
\"collect-profiles-29319450-vmkkg\" (UID: \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.356637 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/57cd85f9-6e86-40ae-9a20-7421a2399c6c-secret-volume\") pod \"collect-profiles-29319450-vmkkg\" (UID: \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.358066 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hb7m\" (UniqueName: \"kubernetes.io/projected/57cd85f9-6e86-40ae-9a20-7421a2399c6c-kube-api-access-8hb7m\") pod \"collect-profiles-29319450-vmkkg\" (UID: \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:00 crc kubenswrapper[4592]: I0929 17:30:00.500858 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:01 crc kubenswrapper[4592]: I0929 17:30:01.037261 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg"] Sep 29 17:30:01 crc kubenswrapper[4592]: I0929 17:30:01.478657 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" event={"ID":"57cd85f9-6e86-40ae-9a20-7421a2399c6c","Type":"ContainerStarted","Data":"491db416994d0f2750ebdfe3ad1c0907ae0369982a523c7781c3c05da298477b"} Sep 29 17:30:01 crc kubenswrapper[4592]: I0929 17:30:01.478712 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" event={"ID":"57cd85f9-6e86-40ae-9a20-7421a2399c6c","Type":"ContainerStarted","Data":"3e91a71b4e7822cbc1e14fb9e2b2d6e8fe2f373703cfedfe251a8ae39b0ffa19"} Sep 29 17:30:01 crc kubenswrapper[4592]: I0929 17:30:01.506474 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" podStartSLOduration=1.5064503889999998 podStartE2EDuration="1.506450389s" podCreationTimestamp="2025-09-29 17:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 17:30:01.504722172 +0000 UTC m=+2331.652499883" watchObservedRunningTime="2025-09-29 17:30:01.506450389 +0000 UTC m=+2331.654228070" Sep 29 17:30:02 crc kubenswrapper[4592]: I0929 17:30:02.492062 4592 generic.go:334] "Generic (PLEG): container finished" podID="57cd85f9-6e86-40ae-9a20-7421a2399c6c" containerID="491db416994d0f2750ebdfe3ad1c0907ae0369982a523c7781c3c05da298477b" exitCode=0 Sep 29 17:30:02 crc kubenswrapper[4592]: I0929 17:30:02.492198 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" event={"ID":"57cd85f9-6e86-40ae-9a20-7421a2399c6c","Type":"ContainerDied","Data":"491db416994d0f2750ebdfe3ad1c0907ae0369982a523c7781c3c05da298477b"} Sep 29 17:30:03 crc kubenswrapper[4592]: I0929 17:30:03.848109 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.004751 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hb7m\" (UniqueName: \"kubernetes.io/projected/57cd85f9-6e86-40ae-9a20-7421a2399c6c-kube-api-access-8hb7m\") pod \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\" (UID: \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\") " Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.005029 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57cd85f9-6e86-40ae-9a20-7421a2399c6c-config-volume\") pod \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\" (UID: \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\") " Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.005182 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/57cd85f9-6e86-40ae-9a20-7421a2399c6c-secret-volume\") pod \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\" (UID: \"57cd85f9-6e86-40ae-9a20-7421a2399c6c\") " Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.005715 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57cd85f9-6e86-40ae-9a20-7421a2399c6c-config-volume" (OuterVolumeSpecName: "config-volume") pod "57cd85f9-6e86-40ae-9a20-7421a2399c6c" (UID: "57cd85f9-6e86-40ae-9a20-7421a2399c6c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.022264 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57cd85f9-6e86-40ae-9a20-7421a2399c6c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "57cd85f9-6e86-40ae-9a20-7421a2399c6c" (UID: "57cd85f9-6e86-40ae-9a20-7421a2399c6c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.030458 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57cd85f9-6e86-40ae-9a20-7421a2399c6c-kube-api-access-8hb7m" (OuterVolumeSpecName: "kube-api-access-8hb7m") pod "57cd85f9-6e86-40ae-9a20-7421a2399c6c" (UID: "57cd85f9-6e86-40ae-9a20-7421a2399c6c"). InnerVolumeSpecName "kube-api-access-8hb7m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.108630 4592 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/57cd85f9-6e86-40ae-9a20-7421a2399c6c-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.108678 4592 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/57cd85f9-6e86-40ae-9a20-7421a2399c6c-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.108691 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hb7m\" (UniqueName: \"kubernetes.io/projected/57cd85f9-6e86-40ae-9a20-7421a2399c6c-kube-api-access-8hb7m\") on node \"crc\" DevicePath \"\"" Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.285880 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s"] Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.293341 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319405-6822s"] Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.511253 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" event={"ID":"57cd85f9-6e86-40ae-9a20-7421a2399c6c","Type":"ContainerDied","Data":"3e91a71b4e7822cbc1e14fb9e2b2d6e8fe2f373703cfedfe251a8ae39b0ffa19"} Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.511293 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e91a71b4e7822cbc1e14fb9e2b2d6e8fe2f373703cfedfe251a8ae39b0ffa19" Sep 29 17:30:04 crc kubenswrapper[4592]: I0929 17:30:04.511348 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg" Sep 29 17:30:05 crc kubenswrapper[4592]: I0929 17:30:05.198666 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25870e3b-7737-4e6a-9ac7-a003d45c140b" path="/var/lib/kubelet/pods/25870e3b-7737-4e6a-9ac7-a003d45c140b/volumes" Sep 29 17:30:22 crc kubenswrapper[4592]: I0929 17:30:22.117431 4592 scope.go:117] "RemoveContainer" containerID="9b339c2682c6ca96f15ab10e4b54b6d296ad154a3bfe8f9f0c1b4c151d08d556" Sep 29 17:30:30 crc kubenswrapper[4592]: I0929 17:30:30.883291 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:30:30 crc kubenswrapper[4592]: I0929 17:30:30.883649 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:31:00 crc kubenswrapper[4592]: I0929 17:31:00.884480 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:31:00 crc kubenswrapper[4592]: I0929 17:31:00.885232 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:31:30 crc kubenswrapper[4592]: I0929 17:31:30.882845 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:31:30 crc kubenswrapper[4592]: I0929 17:31:30.883632 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:31:30 crc kubenswrapper[4592]: I0929 17:31:30.883693 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 17:31:30 crc kubenswrapper[4592]: I0929 17:31:30.884689 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 17:31:30 crc kubenswrapper[4592]: I0929 17:31:30.884781 4592 kuberuntime_container.go:808] "Killing container with a grace period" 
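The entries above follow the klog-through-journald layout used throughout this file: syslog timestamp and host, the kubenswrapper[pid] tag, a klog header (severity letter, MMDD, wall time, source file:line), then a structured message with key="value" pairs. A minimal Python sketch for grouping such entries per pod, assuming exactly this layout (the regexes are illustrative, not part of kubelet):

    import re
    from collections import defaultdict

    # Matches e.g.:
    # Sep 29 17:30:02 crc kubenswrapper[4592]: I0929 17:30:02.492062 4592 generic.go:334] "..." pod="ns/name"
    LINE_RE = re.compile(
        r'^(?P<stamp>\w{3} [ \d]\d \d\d:\d\d:\d\d) (?P<host>\S+) kubenswrapper\[\d+\]: '
        r'(?P<sev>[IWE])\d{4} (?P<time>\d\d:\d\d:\d\d\.\d+)\s+\d+ (?P<src>[\w./]+:\d+)\] (?P<msg>.*)$'
    )
    POD_RE = re.compile(r'pod="(?P<pod>[^"]+)"')

    def events_by_pod(lines):
        """Group (klog time, severity, source, message) tuples per pod reference."""
        out = defaultdict(list)
        for line in lines:
            m = LINE_RE.match(line)
            if not m:
                continue  # continuation fragments or non-kubelet lines
            p = POD_RE.search(m['msg'])
            key = p['pod'] if p else '<no-pod>'
            out[key].append((m['time'], m['sev'], m['src'], m['msg']))
        return out

Run over this file, events_by_pod would collapse the collect-profiles-29319450-vmkkg lifecycle above (mount, sandbox start, ContainerStarted, ContainerDied, unmount, detach) into one per-pod timeline.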
pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" gracePeriod=600 Sep 29 17:31:30 crc kubenswrapper[4592]: E0929 17:31:30.957690 4592 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4cc986fa_6620_43ff_ae05_11c71e326035.slice/crio-b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636.scope\": RecentStats: unable to find data in memory cache]" Sep 29 17:31:31 crc kubenswrapper[4592]: E0929 17:31:31.018739 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:31:31 crc kubenswrapper[4592]: I0929 17:31:31.285689 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" exitCode=0 Sep 29 17:31:31 crc kubenswrapper[4592]: I0929 17:31:31.285729 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636"} Sep 29 17:31:31 crc kubenswrapper[4592]: I0929 17:31:31.285761 4592 scope.go:117] "RemoveContainer" containerID="40b13a263d719db9fdf3ca3ac6947962b18e9912d31484ebaf596f663dccdd74" Sep 29 17:31:31 crc kubenswrapper[4592]: I0929 17:31:31.286411 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:31:31 crc kubenswrapper[4592]: E0929 17:31:31.286638 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:31:44 crc kubenswrapper[4592]: I0929 17:31:44.183071 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:31:44 crc kubenswrapper[4592]: E0929 17:31:44.183860 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:31:50 crc kubenswrapper[4592]: I0929 17:31:50.472070 4592 generic.go:334] "Generic (PLEG): container finished" podID="40c378c3-0f92-474d-aaed-f3cd105e4714" containerID="8d2c0eed184b0f962355a52036a05b2d0c520385390159390a70492133db15cb" exitCode=0 Sep 29 17:31:50 crc 
Sep 29 17:31:50 crc kubenswrapper[4592]: I0929 17:31:50.472070 4592 generic.go:334] "Generic (PLEG): container finished" podID="40c378c3-0f92-474d-aaed-f3cd105e4714" containerID="8d2c0eed184b0f962355a52036a05b2d0c520385390159390a70492133db15cb" exitCode=0
Sep 29 17:31:50 crc kubenswrapper[4592]: I0929 17:31:50.472611 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" event={"ID":"40c378c3-0f92-474d-aaed-f3cd105e4714","Type":"ContainerDied","Data":"8d2c0eed184b0f962355a52036a05b2d0c520385390159390a70492133db15cb"}
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.858276 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6"
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.889796 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-libvirt-secret-0\") pod \"40c378c3-0f92-474d-aaed-f3cd105e4714\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") "
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.889987 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-libvirt-combined-ca-bundle\") pod \"40c378c3-0f92-474d-aaed-f3cd105e4714\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") "
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.890051 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-ssh-key\") pod \"40c378c3-0f92-474d-aaed-f3cd105e4714\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") "
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.890105 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w78zz\" (UniqueName: \"kubernetes.io/projected/40c378c3-0f92-474d-aaed-f3cd105e4714-kube-api-access-w78zz\") pod \"40c378c3-0f92-474d-aaed-f3cd105e4714\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") "
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.890227 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-inventory\") pod \"40c378c3-0f92-474d-aaed-f3cd105e4714\" (UID: \"40c378c3-0f92-474d-aaed-f3cd105e4714\") "
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.898358 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40c378c3-0f92-474d-aaed-f3cd105e4714-kube-api-access-w78zz" (OuterVolumeSpecName: "kube-api-access-w78zz") pod "40c378c3-0f92-474d-aaed-f3cd105e4714" (UID: "40c378c3-0f92-474d-aaed-f3cd105e4714"). InnerVolumeSpecName "kube-api-access-w78zz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.900317 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "40c378c3-0f92-474d-aaed-f3cd105e4714" (UID: "40c378c3-0f92-474d-aaed-f3cd105e4714"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.927538 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "40c378c3-0f92-474d-aaed-f3cd105e4714" (UID: "40c378c3-0f92-474d-aaed-f3cd105e4714"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.927800 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-inventory" (OuterVolumeSpecName: "inventory") pod "40c378c3-0f92-474d-aaed-f3cd105e4714" (UID: "40c378c3-0f92-474d-aaed-f3cd105e4714"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.928027 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "40c378c3-0f92-474d-aaed-f3cd105e4714" (UID: "40c378c3-0f92-474d-aaed-f3cd105e4714"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.991947 4592 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.991974 4592 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.991985 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-ssh-key\") on node \"crc\" DevicePath \"\""
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.991993 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w78zz\" (UniqueName: \"kubernetes.io/projected/40c378c3-0f92-474d-aaed-f3cd105e4714-kube-api-access-w78zz\") on node \"crc\" DevicePath \"\""
Sep 29 17:31:51 crc kubenswrapper[4592]: I0929 17:31:51.992002 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40c378c3-0f92-474d-aaed-f3cd105e4714-inventory\") on node \"crc\" DevicePath \"\""
Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.499996 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6" event={"ID":"40c378c3-0f92-474d-aaed-f3cd105e4714","Type":"ContainerDied","Data":"1b25cde6c738940db78492db52efb30e7beb1dbbd149c89ea2178e3736ecad5d"}
Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.500331 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b25cde6c738940db78492db52efb30e7beb1dbbd149c89ea2178e3736ecad5d"
Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.500404 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6"
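Volume teardown above always comes in pairs per volume: an "operationExecutor.UnmountVolume started" record followed, on success, by a "Volume detached" record. A hypothetical checker for captures like this one, flagging any volume whose teardown never completed (the patterns account for the escaped quotes as they appear in this file):

    import re

    STARTED = re.compile(r'UnmountVolume started for volume \\?"([^"\\]+)')
    DETACHED = re.compile(r'Volume detached for volume \\?"([^"\\]+)')

    def unfinished_unmounts(lines):
        """Return volume names with an unmount start but no matching detach."""
        started, detached = set(), set()
        for line in lines:
            if (m := STARTED.search(line)):
                started.add(m.group(1))
            if (m := DETACHED.search(line)):
                detached.add(m.group(1))
        return started - detached

For the libvirt-edpm pod above this returns an empty set: all five volumes (libvirt-secret-0, libvirt-combined-ca-bundle, ssh-key, kube-api-access-w78zz, inventory) reach "Volume detached".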
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.705372 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt7hn\" (UniqueName: \"kubernetes.io/projected/e5d09077-a84b-4b69-974b-5286b27f244f-kube-api-access-jt7hn\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.705432 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/e5d09077-a84b-4b69-974b-5286b27f244f-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.705447 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.705656 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.705887 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.705937 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.705993 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.706069 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-cell1-compute-config-0\") 
pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.807342 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.807640 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/e5d09077-a84b-4b69-974b-5286b27f244f-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.807789 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.808029 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.808219 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.808620 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/e5d09077-a84b-4b69-974b-5286b27f244f-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.808768 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.808901 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: 
\"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.809051 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.809281 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt7hn\" (UniqueName: \"kubernetes.io/projected/e5d09077-a84b-4b69-974b-5286b27f244f-kube-api-access-jt7hn\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.812383 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.812701 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.812730 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.812928 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.813735 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.814685 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 
17:31:52.816220 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.829132 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt7hn\" (UniqueName: \"kubernetes.io/projected/e5d09077-a84b-4b69-974b-5286b27f244f-kube-api-access-jt7hn\") pod \"nova-edpm-deployment-openstack-edpm-ipam-rrsnr\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:52 crc kubenswrapper[4592]: I0929 17:31:52.937011 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:31:53 crc kubenswrapper[4592]: I0929 17:31:53.462802 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr"] Sep 29 17:31:53 crc kubenswrapper[4592]: W0929 17:31:53.478463 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5d09077_a84b_4b69_974b_5286b27f244f.slice/crio-25c534b5c6d85bb0863a22761f437ad3282ff5f21e52adf73eaa6aa2266ee948 WatchSource:0}: Error finding container 25c534b5c6d85bb0863a22761f437ad3282ff5f21e52adf73eaa6aa2266ee948: Status 404 returned error can't find the container with id 25c534b5c6d85bb0863a22761f437ad3282ff5f21e52adf73eaa6aa2266ee948 Sep 29 17:31:53 crc kubenswrapper[4592]: I0929 17:31:53.483080 4592 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 17:31:53 crc kubenswrapper[4592]: I0929 17:31:53.512249 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" event={"ID":"e5d09077-a84b-4b69-974b-5286b27f244f","Type":"ContainerStarted","Data":"25c534b5c6d85bb0863a22761f437ad3282ff5f21e52adf73eaa6aa2266ee948"} Sep 29 17:31:54 crc kubenswrapper[4592]: I0929 17:31:54.524500 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" event={"ID":"e5d09077-a84b-4b69-974b-5286b27f244f","Type":"ContainerStarted","Data":"33534597ec5ab51e9144b7604d8a6d7f4b6e12b905537239fbd2e8ede81cc323"} Sep 29 17:31:54 crc kubenswrapper[4592]: I0929 17:31:54.547200 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" podStartSLOduration=1.966309083 podStartE2EDuration="2.547177181s" podCreationTimestamp="2025-09-29 17:31:52 +0000 UTC" firstStartedPulling="2025-09-29 17:31:53.482260406 +0000 UTC m=+2443.630038097" lastFinishedPulling="2025-09-29 17:31:54.063128514 +0000 UTC m=+2444.210906195" observedRunningTime="2025-09-29 17:31:54.542966357 +0000 UTC m=+2444.690744118" watchObservedRunningTime="2025-09-29 17:31:54.547177181 +0000 UTC m=+2444.694954902" Sep 29 17:31:58 crc kubenswrapper[4592]: I0929 17:31:58.182740 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:31:58 crc kubenswrapper[4592]: E0929 17:31:58.183255 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
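The pod_startup_latency_tracker line above carries monotonic offsets (m=+<seconds> since kubelet start) alongside the wall-clock stamps, which makes durations easy to recover: for this nova-edpm pod the image-pull window is 2444.210906195 - 2443.630038097 ≈ 0.58s inside the 2.547s podStartE2EDuration. A sketch that extracts those offsets, assuming the m=+ convention shown in these entries:

    import re

    MONO = re.compile(r'(\w+)="[^"]*? m=\+(\d+\.\d+)"')

    def startup_offsets(entry):
        """Map field name -> monotonic offset in seconds for m=+ annotated fields."""
        return {name: float(sec) for name, sec in MONO.findall(entry)}

    entry = ('firstStartedPulling="2025-09-29 17:31:53.482260406 +0000 UTC m=+2443.630038097" '
             'lastFinishedPulling="2025-09-29 17:31:54.063128514 +0000 UTC m=+2444.210906195"')
    off = startup_offsets(entry)
    print(off["lastFinishedPulling"] - off["firstStartedPulling"])  # ~0.58s pull window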
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:32:11 crc kubenswrapper[4592]: I0929 17:32:11.197605 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:32:11 crc kubenswrapper[4592]: E0929 17:32:11.198361 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:32:26 crc kubenswrapper[4592]: I0929 17:32:26.183372 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:32:26 crc kubenswrapper[4592]: E0929 17:32:26.184133 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:32:37 crc kubenswrapper[4592]: I0929 17:32:37.183659 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:32:37 crc kubenswrapper[4592]: E0929 17:32:37.184338 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.031545 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xxcqh"] Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.033820 4592 util.go:30] "No sandbox for pod can be found. 
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.031545 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xxcqh"]
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.033820 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.057677 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31467959-0cfc-4636-b428-aed5ce2bfbdd-utilities\") pod \"certified-operators-xxcqh\" (UID: \"31467959-0cfc-4636-b428-aed5ce2bfbdd\") " pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.057897 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31467959-0cfc-4636-b428-aed5ce2bfbdd-catalog-content\") pod \"certified-operators-xxcqh\" (UID: \"31467959-0cfc-4636-b428-aed5ce2bfbdd\") " pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.057939 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6d9tp\" (UniqueName: \"kubernetes.io/projected/31467959-0cfc-4636-b428-aed5ce2bfbdd-kube-api-access-6d9tp\") pod \"certified-operators-xxcqh\" (UID: \"31467959-0cfc-4636-b428-aed5ce2bfbdd\") " pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.072035 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xxcqh"]
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.160393 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31467959-0cfc-4636-b428-aed5ce2bfbdd-catalog-content\") pod \"certified-operators-xxcqh\" (UID: \"31467959-0cfc-4636-b428-aed5ce2bfbdd\") " pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.160435 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6d9tp\" (UniqueName: \"kubernetes.io/projected/31467959-0cfc-4636-b428-aed5ce2bfbdd-kube-api-access-6d9tp\") pod \"certified-operators-xxcqh\" (UID: \"31467959-0cfc-4636-b428-aed5ce2bfbdd\") " pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.160507 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31467959-0cfc-4636-b428-aed5ce2bfbdd-utilities\") pod \"certified-operators-xxcqh\" (UID: \"31467959-0cfc-4636-b428-aed5ce2bfbdd\") " pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.161019 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31467959-0cfc-4636-b428-aed5ce2bfbdd-utilities\") pod \"certified-operators-xxcqh\" (UID: \"31467959-0cfc-4636-b428-aed5ce2bfbdd\") " pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.161047 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31467959-0cfc-4636-b428-aed5ce2bfbdd-catalog-content\") pod \"certified-operators-xxcqh\" (UID: \"31467959-0cfc-4636-b428-aed5ce2bfbdd\") " pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.194228 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6d9tp\" (UniqueName: \"kubernetes.io/projected/31467959-0cfc-4636-b428-aed5ce2bfbdd-kube-api-access-6d9tp\") pod \"certified-operators-xxcqh\" (UID: \"31467959-0cfc-4636-b428-aed5ce2bfbdd\") " pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.402136 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:44 crc kubenswrapper[4592]: I0929 17:32:44.976946 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xxcqh"]
Sep 29 17:32:45 crc kubenswrapper[4592]: I0929 17:32:45.957958 4592 generic.go:334] "Generic (PLEG): container finished" podID="31467959-0cfc-4636-b428-aed5ce2bfbdd" containerID="b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858" exitCode=0
Sep 29 17:32:45 crc kubenswrapper[4592]: I0929 17:32:45.958168 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxcqh" event={"ID":"31467959-0cfc-4636-b428-aed5ce2bfbdd","Type":"ContainerDied","Data":"b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858"}
Sep 29 17:32:45 crc kubenswrapper[4592]: I0929 17:32:45.958300 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxcqh" event={"ID":"31467959-0cfc-4636-b428-aed5ce2bfbdd","Type":"ContainerStarted","Data":"e07d6cb91ca5ac1dd03c7005f553f9352c555be267af5c304e515d7dc96ef252"}
Sep 29 17:32:47 crc kubenswrapper[4592]: I0929 17:32:47.981787 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxcqh" event={"ID":"31467959-0cfc-4636-b428-aed5ce2bfbdd","Type":"ContainerStarted","Data":"840601dbb17c48463d3ad0e16eb8dad0840c14189389546afef32f9959ddcb03"}
Sep 29 17:32:48 crc kubenswrapper[4592]: I0929 17:32:48.183036 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636"
Sep 29 17:32:48 crc kubenswrapper[4592]: E0929 17:32:48.183601 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:32:49 crc kubenswrapper[4592]: I0929 17:32:49.999314 4592 generic.go:334] "Generic (PLEG): container finished" podID="31467959-0cfc-4636-b428-aed5ce2bfbdd" containerID="840601dbb17c48463d3ad0e16eb8dad0840c14189389546afef32f9959ddcb03" exitCode=0
Sep 29 17:32:49 crc kubenswrapper[4592]: I0929 17:32:49.999363 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxcqh" event={"ID":"31467959-0cfc-4636-b428-aed5ce2bfbdd","Type":"ContainerDied","Data":"840601dbb17c48463d3ad0e16eb8dad0840c14189389546afef32f9959ddcb03"}
Sep 29 17:32:52 crc kubenswrapper[4592]: I0929 17:32:52.021387 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxcqh" event={"ID":"31467959-0cfc-4636-b428-aed5ce2bfbdd","Type":"ContainerStarted","Data":"e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb"}
Sep 29 17:32:52 crc kubenswrapper[4592]: I0929 17:32:52.054663 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xxcqh" podStartSLOduration=2.951323654 podStartE2EDuration="8.054635702s" podCreationTimestamp="2025-09-29 17:32:44 +0000 UTC" firstStartedPulling="2025-09-29 17:32:45.95974585 +0000 UTC m=+2496.107523531" lastFinishedPulling="2025-09-29 17:32:51.063057898 +0000 UTC m=+2501.210835579" observedRunningTime="2025-09-29 17:32:52.045173484 +0000 UTC m=+2502.192951165" watchObservedRunningTime="2025-09-29 17:32:52.054635702 +0000 UTC m=+2502.202413413"
Sep 29 17:32:54 crc kubenswrapper[4592]: I0929 17:32:54.402968 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:54 crc kubenswrapper[4592]: I0929 17:32:54.403303 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:54 crc kubenswrapper[4592]: I0929 17:32:54.459136 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:32:59 crc kubenswrapper[4592]: I0929 17:32:59.183998 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636"
Sep 29 17:32:59 crc kubenswrapper[4592]: E0929 17:32:59.187038 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:33:04 crc kubenswrapper[4592]: I0929 17:33:04.481340 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:33:04 crc kubenswrapper[4592]: I0929 17:33:04.536000 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xxcqh"]
Sep 29 17:33:05 crc kubenswrapper[4592]: I0929 17:33:05.141084 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xxcqh" podUID="31467959-0cfc-4636-b428-aed5ce2bfbdd" containerName="registry-server" containerID="cri-o://e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb" gracePeriod=2
Sep 29 17:33:05 crc kubenswrapper[4592]: I0929 17:33:05.579001 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:33:05 crc kubenswrapper[4592]: I0929 17:33:05.694764 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6d9tp\" (UniqueName: \"kubernetes.io/projected/31467959-0cfc-4636-b428-aed5ce2bfbdd-kube-api-access-6d9tp\") pod \"31467959-0cfc-4636-b428-aed5ce2bfbdd\" (UID: \"31467959-0cfc-4636-b428-aed5ce2bfbdd\") "
Sep 29 17:33:05 crc kubenswrapper[4592]: I0929 17:33:05.694940 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31467959-0cfc-4636-b428-aed5ce2bfbdd-catalog-content\") pod \"31467959-0cfc-4636-b428-aed5ce2bfbdd\" (UID: \"31467959-0cfc-4636-b428-aed5ce2bfbdd\") "
Sep 29 17:33:05 crc kubenswrapper[4592]: I0929 17:33:05.695258 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31467959-0cfc-4636-b428-aed5ce2bfbdd-utilities\") pod \"31467959-0cfc-4636-b428-aed5ce2bfbdd\" (UID: \"31467959-0cfc-4636-b428-aed5ce2bfbdd\") "
Sep 29 17:33:05 crc kubenswrapper[4592]: I0929 17:33:05.695770 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31467959-0cfc-4636-b428-aed5ce2bfbdd-utilities" (OuterVolumeSpecName: "utilities") pod "31467959-0cfc-4636-b428-aed5ce2bfbdd" (UID: "31467959-0cfc-4636-b428-aed5ce2bfbdd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:33:05 crc kubenswrapper[4592]: I0929 17:33:05.706548 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31467959-0cfc-4636-b428-aed5ce2bfbdd-kube-api-access-6d9tp" (OuterVolumeSpecName: "kube-api-access-6d9tp") pod "31467959-0cfc-4636-b428-aed5ce2bfbdd" (UID: "31467959-0cfc-4636-b428-aed5ce2bfbdd"). InnerVolumeSpecName "kube-api-access-6d9tp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:33:05 crc kubenswrapper[4592]: I0929 17:33:05.752142 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31467959-0cfc-4636-b428-aed5ce2bfbdd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "31467959-0cfc-4636-b428-aed5ce2bfbdd" (UID: "31467959-0cfc-4636-b428-aed5ce2bfbdd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:33:05 crc kubenswrapper[4592]: I0929 17:33:05.797716 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/31467959-0cfc-4636-b428-aed5ce2bfbdd-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 17:33:05 crc kubenswrapper[4592]: I0929 17:33:05.797766 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6d9tp\" (UniqueName: \"kubernetes.io/projected/31467959-0cfc-4636-b428-aed5ce2bfbdd-kube-api-access-6d9tp\") on node \"crc\" DevicePath \"\""
Sep 29 17:33:05 crc kubenswrapper[4592]: I0929 17:33:05.797780 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/31467959-0cfc-4636-b428-aed5ce2bfbdd-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.155036 4592 generic.go:334] "Generic (PLEG): container finished" podID="31467959-0cfc-4636-b428-aed5ce2bfbdd" containerID="e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb" exitCode=0
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.155084 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxcqh" event={"ID":"31467959-0cfc-4636-b428-aed5ce2bfbdd","Type":"ContainerDied","Data":"e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb"}
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.155163 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xxcqh" event={"ID":"31467959-0cfc-4636-b428-aed5ce2bfbdd","Type":"ContainerDied","Data":"e07d6cb91ca5ac1dd03c7005f553f9352c555be267af5c304e515d7dc96ef252"}
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.155165 4592 scope.go:117] "RemoveContainer" containerID="e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb"
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.155235 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xxcqh"
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.183511 4592 scope.go:117] "RemoveContainer" containerID="840601dbb17c48463d3ad0e16eb8dad0840c14189389546afef32f9959ddcb03"
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.219670 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xxcqh"]
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.227344 4592 scope.go:117] "RemoveContainer" containerID="b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858"
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.237541 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xxcqh"]
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.274327 4592 scope.go:117] "RemoveContainer" containerID="e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb"
Sep 29 17:33:06 crc kubenswrapper[4592]: E0929 17:33:06.274916 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb\": container with ID starting with e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb not found: ID does not exist" containerID="e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb"
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.274954 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb"} err="failed to get container status \"e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb\": rpc error: code = NotFound desc = could not find container \"e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb\": container with ID starting with e9480bdd6acf295d6cf58c09d205b66829d68342c2a29fe506cf61ad6ea58aeb not found: ID does not exist"
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.274981 4592 scope.go:117] "RemoveContainer" containerID="840601dbb17c48463d3ad0e16eb8dad0840c14189389546afef32f9959ddcb03"
Sep 29 17:33:06 crc kubenswrapper[4592]: E0929 17:33:06.275496 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"840601dbb17c48463d3ad0e16eb8dad0840c14189389546afef32f9959ddcb03\": container with ID starting with 840601dbb17c48463d3ad0e16eb8dad0840c14189389546afef32f9959ddcb03 not found: ID does not exist" containerID="840601dbb17c48463d3ad0e16eb8dad0840c14189389546afef32f9959ddcb03"
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.275518 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"840601dbb17c48463d3ad0e16eb8dad0840c14189389546afef32f9959ddcb03"} err="failed to get container status \"840601dbb17c48463d3ad0e16eb8dad0840c14189389546afef32f9959ddcb03\": rpc error: code = NotFound desc = could not find container \"840601dbb17c48463d3ad0e16eb8dad0840c14189389546afef32f9959ddcb03\": container with ID starting with 840601dbb17c48463d3ad0e16eb8dad0840c14189389546afef32f9959ddcb03 not found: ID does not exist"
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.275534 4592 scope.go:117] "RemoveContainer" containerID="b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858"
Sep 29 17:33:06 crc kubenswrapper[4592]: E0929 17:33:06.275804 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858\": container with ID starting with b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858 not found: ID does not exist" containerID="b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858"
Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.275863 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858"} err="failed to get container status \"b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858\": rpc error: code = NotFound desc = could not find container \"b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858\": container with ID starting with b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858 not found: ID does not exist"
Sep 29 17:33:07 crc kubenswrapper[4592]: I0929 17:33:07.200207 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31467959-0cfc-4636-b428-aed5ce2bfbdd" path="/var/lib/kubelet/pods/31467959-0cfc-4636-b428-aed5ce2bfbdd/volumes"
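The RemoveContainer/DeleteContainer exchange above is benign: the pod REMOVE had already deleted the containers, so the runtime answers NotFound and the kubelet records the error and carries on. The same idempotent-cleanup pattern in a generic form; the NotFoundError class is hypothetical, standing in for the runtime's NotFound status:

    class NotFoundError(Exception):
        """Stand-in for a runtime's NotFound status (e.g. rpc code = NotFound)."""

    def remove_container(runtime_delete, container_id):
        """Delete a container, treating 'already gone' as success."""
        try:
            runtime_delete(container_id)
        except NotFoundError:
            # Another path already deleted it; deletion is idempotent, so do nothing.
            pass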
failed" err="rpc error: code = NotFound desc = could not find container \"b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858\": container with ID starting with b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858 not found: ID does not exist" containerID="b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858" Sep 29 17:33:06 crc kubenswrapper[4592]: I0929 17:33:06.275863 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858"} err="failed to get container status \"b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858\": rpc error: code = NotFound desc = could not find container \"b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858\": container with ID starting with b5cfd7412348b022cf8342097b27bb9a7a7065bf733322b0578b0bcd8326e858 not found: ID does not exist" Sep 29 17:33:07 crc kubenswrapper[4592]: I0929 17:33:07.200207 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31467959-0cfc-4636-b428-aed5ce2bfbdd" path="/var/lib/kubelet/pods/31467959-0cfc-4636-b428-aed5ce2bfbdd/volumes" Sep 29 17:33:11 crc kubenswrapper[4592]: I0929 17:33:11.190381 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:33:11 crc kubenswrapper[4592]: E0929 17:33:11.193359 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:33:25 crc kubenswrapper[4592]: I0929 17:33:25.185187 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:33:25 crc kubenswrapper[4592]: E0929 17:33:25.185963 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:33:36 crc kubenswrapper[4592]: I0929 17:33:36.183187 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:33:36 crc kubenswrapper[4592]: E0929 17:33:36.184195 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:33:47 crc kubenswrapper[4592]: I0929 17:33:47.183939 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:33:47 crc kubenswrapper[4592]: E0929 17:33:47.186329 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:34:00 crc kubenswrapper[4592]: I0929 17:34:00.183197 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:34:00 crc kubenswrapper[4592]: E0929 17:34:00.183913 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:34:12 crc kubenswrapper[4592]: I0929 17:34:12.183822 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:34:12 crc kubenswrapper[4592]: E0929 17:34:12.184732 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:34:27 crc kubenswrapper[4592]: I0929 17:34:27.183689 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:34:27 crc kubenswrapper[4592]: E0929 17:34:27.184674 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:34:38 crc kubenswrapper[4592]: I0929 17:34:38.183671 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:34:38 crc kubenswrapper[4592]: E0929 17:34:38.184785 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:34:50 crc kubenswrapper[4592]: I0929 17:34:50.184111 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:34:50 crc kubenswrapper[4592]: E0929 17:34:50.184963 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:35:01 crc kubenswrapper[4592]: I0929 17:35:01.189121 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:35:01 crc kubenswrapper[4592]: E0929 17:35:01.189830 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:35:15 crc kubenswrapper[4592]: I0929 17:35:15.186628 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:35:15 crc kubenswrapper[4592]: E0929 17:35:15.187545 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:35:15 crc kubenswrapper[4592]: I0929 17:35:15.440682 4592 generic.go:334] "Generic (PLEG): container finished" podID="e5d09077-a84b-4b69-974b-5286b27f244f" containerID="33534597ec5ab51e9144b7604d8a6d7f4b6e12b905537239fbd2e8ede81cc323" exitCode=0 Sep 29 17:35:15 crc kubenswrapper[4592]: I0929 17:35:15.440735 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" event={"ID":"e5d09077-a84b-4b69-974b-5286b27f244f","Type":"ContainerDied","Data":"33534597ec5ab51e9144b7604d8a6d7f4b6e12b905537239fbd2e8ede81cc323"} Sep 29 17:35:16 crc kubenswrapper[4592]: I0929 17:35:16.928014 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:35:16 crc kubenswrapper[4592]: I0929 17:35:16.973204 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-ssh-key\") pod \"e5d09077-a84b-4b69-974b-5286b27f244f\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " Sep 29 17:35:16 crc kubenswrapper[4592]: I0929 17:35:16.973262 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-migration-ssh-key-0\") pod \"e5d09077-a84b-4b69-974b-5286b27f244f\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " Sep 29 17:35:16 crc kubenswrapper[4592]: I0929 17:35:16.973311 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-inventory\") pod \"e5d09077-a84b-4b69-974b-5286b27f244f\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " Sep 29 17:35:16 crc kubenswrapper[4592]: I0929 17:35:16.973350 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/e5d09077-a84b-4b69-974b-5286b27f244f-nova-extra-config-0\") pod \"e5d09077-a84b-4b69-974b-5286b27f244f\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " Sep 29 17:35:16 crc kubenswrapper[4592]: I0929 17:35:16.973373 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-migration-ssh-key-1\") pod \"e5d09077-a84b-4b69-974b-5286b27f244f\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " Sep 29 17:35:16 crc kubenswrapper[4592]: I0929 17:35:16.973439 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-cell1-compute-config-1\") pod \"e5d09077-a84b-4b69-974b-5286b27f244f\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " Sep 29 17:35:16 crc kubenswrapper[4592]: I0929 17:35:16.973536 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jt7hn\" (UniqueName: \"kubernetes.io/projected/e5d09077-a84b-4b69-974b-5286b27f244f-kube-api-access-jt7hn\") pod \"e5d09077-a84b-4b69-974b-5286b27f244f\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " Sep 29 17:35:16 crc kubenswrapper[4592]: I0929 17:35:16.973667 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-combined-ca-bundle\") pod \"e5d09077-a84b-4b69-974b-5286b27f244f\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " Sep 29 17:35:16 crc kubenswrapper[4592]: I0929 17:35:16.973687 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-cell1-compute-config-0\") pod \"e5d09077-a84b-4b69-974b-5286b27f244f\" (UID: \"e5d09077-a84b-4b69-974b-5286b27f244f\") " Sep 29 17:35:16 crc kubenswrapper[4592]: I0929 17:35:16.983845 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "e5d09077-a84b-4b69-974b-5286b27f244f" (UID: "e5d09077-a84b-4b69-974b-5286b27f244f"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:35:16 crc kubenswrapper[4592]: I0929 17:35:16.986399 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5d09077-a84b-4b69-974b-5286b27f244f-kube-api-access-jt7hn" (OuterVolumeSpecName: "kube-api-access-jt7hn") pod "e5d09077-a84b-4b69-974b-5286b27f244f" (UID: "e5d09077-a84b-4b69-974b-5286b27f244f"). InnerVolumeSpecName "kube-api-access-jt7hn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.018283 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e5d09077-a84b-4b69-974b-5286b27f244f" (UID: "e5d09077-a84b-4b69-974b-5286b27f244f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.023116 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "e5d09077-a84b-4b69-974b-5286b27f244f" (UID: "e5d09077-a84b-4b69-974b-5286b27f244f"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.028821 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5d09077-a84b-4b69-974b-5286b27f244f-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "e5d09077-a84b-4b69-974b-5286b27f244f" (UID: "e5d09077-a84b-4b69-974b-5286b27f244f"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.029078 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-inventory" (OuterVolumeSpecName: "inventory") pod "e5d09077-a84b-4b69-974b-5286b27f244f" (UID: "e5d09077-a84b-4b69-974b-5286b27f244f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.030869 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "e5d09077-a84b-4b69-974b-5286b27f244f" (UID: "e5d09077-a84b-4b69-974b-5286b27f244f"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.042693 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "e5d09077-a84b-4b69-974b-5286b27f244f" (UID: "e5d09077-a84b-4b69-974b-5286b27f244f"). InnerVolumeSpecName "nova-migration-ssh-key-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.047349 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "e5d09077-a84b-4b69-974b-5286b27f244f" (UID: "e5d09077-a84b-4b69-974b-5286b27f244f"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.076752 4592 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.076898 4592 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.076985 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.077328 4592 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.077423 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.077519 4592 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/e5d09077-a84b-4b69-974b-5286b27f244f-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.077618 4592 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.077704 4592 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/e5d09077-a84b-4b69-974b-5286b27f244f-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.077778 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jt7hn\" (UniqueName: \"kubernetes.io/projected/e5d09077-a84b-4b69-974b-5286b27f244f-kube-api-access-jt7hn\") on node \"crc\" DevicePath \"\"" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.460202 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" event={"ID":"e5d09077-a84b-4b69-974b-5286b27f244f","Type":"ContainerDied","Data":"25c534b5c6d85bb0863a22761f437ad3282ff5f21e52adf73eaa6aa2266ee948"} Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.460554 4592 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="25c534b5c6d85bb0863a22761f437ad3282ff5f21e52adf73eaa6aa2266ee948" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.460223 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-rrsnr" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.631046 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h"] Sep 29 17:35:17 crc kubenswrapper[4592]: E0929 17:35:17.631511 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5d09077-a84b-4b69-974b-5286b27f244f" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.631533 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5d09077-a84b-4b69-974b-5286b27f244f" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 17:35:17 crc kubenswrapper[4592]: E0929 17:35:17.631567 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31467959-0cfc-4636-b428-aed5ce2bfbdd" containerName="extract-utilities" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.631577 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="31467959-0cfc-4636-b428-aed5ce2bfbdd" containerName="extract-utilities" Sep 29 17:35:17 crc kubenswrapper[4592]: E0929 17:35:17.631593 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31467959-0cfc-4636-b428-aed5ce2bfbdd" containerName="registry-server" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.631600 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="31467959-0cfc-4636-b428-aed5ce2bfbdd" containerName="registry-server" Sep 29 17:35:17 crc kubenswrapper[4592]: E0929 17:35:17.631624 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31467959-0cfc-4636-b428-aed5ce2bfbdd" containerName="extract-content" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.631630 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="31467959-0cfc-4636-b428-aed5ce2bfbdd" containerName="extract-content" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.631844 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5d09077-a84b-4b69-974b-5286b27f244f" containerName="nova-edpm-deployment-openstack-edpm-ipam" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.631878 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="31467959-0cfc-4636-b428-aed5ce2bfbdd" containerName="registry-server" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.632632 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.635547 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.635761 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-x5ss6" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.635698 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.636372 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.643798 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.651520 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h"] Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.689628 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdr66\" (UniqueName: \"kubernetes.io/projected/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-kube-api-access-jdr66\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.689761 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.689817 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.689863 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.689996 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 
crc kubenswrapper[4592]: I0929 17:35:17.690069 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.690174 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.791669 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdr66\" (UniqueName: \"kubernetes.io/projected/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-kube-api-access-jdr66\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.791736 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.791765 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.791792 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.791877 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.791911 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.791948 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.797375 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.797592 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.797961 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.798448 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.799893 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.800133 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:17 crc kubenswrapper[4592]: I0929 17:35:17.811927 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdr66\" (UniqueName: \"kubernetes.io/projected/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-kube-api-access-jdr66\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h\" (UID: 
\"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:18 crc kubenswrapper[4592]: I0929 17:35:18.019929 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:35:18 crc kubenswrapper[4592]: I0929 17:35:18.606788 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h"] Sep 29 17:35:19 crc kubenswrapper[4592]: I0929 17:35:19.503186 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" event={"ID":"d76cbdef-0253-4fd5-abc2-bec6b0b6df81","Type":"ContainerStarted","Data":"eabd791d6ba9fe18eea7184e2daab4bde09fe4b5707fceb83fbd605ddfab7896"} Sep 29 17:35:20 crc kubenswrapper[4592]: I0929 17:35:20.517124 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" event={"ID":"d76cbdef-0253-4fd5-abc2-bec6b0b6df81","Type":"ContainerStarted","Data":"2b3c03f181cd54a9ebf7c7f4be39ed01885cdb152a6bdae7790a4a5dddbd1fd1"} Sep 29 17:35:30 crc kubenswrapper[4592]: I0929 17:35:30.183216 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:35:30 crc kubenswrapper[4592]: E0929 17:35:30.183898 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:35:42 crc kubenswrapper[4592]: I0929 17:35:42.183623 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:35:42 crc kubenswrapper[4592]: E0929 17:35:42.184397 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.732602 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" podStartSLOduration=38.506209597 podStartE2EDuration="39.732586881s" podCreationTimestamp="2025-09-29 17:35:17 +0000 UTC" firstStartedPulling="2025-09-29 17:35:18.610175549 +0000 UTC m=+2648.757953230" lastFinishedPulling="2025-09-29 17:35:19.836552833 +0000 UTC m=+2649.984330514" observedRunningTime="2025-09-29 17:35:20.536164596 +0000 UTC m=+2650.683942277" watchObservedRunningTime="2025-09-29 17:35:56.732586881 +0000 UTC m=+2686.880364562" Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.735588 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mf6fs"] Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.737436 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.752404 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mf6fs"] Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.801838 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c2e82f0-0742-4635-956c-e269a37f65eb-catalog-content\") pod \"community-operators-mf6fs\" (UID: \"3c2e82f0-0742-4635-956c-e269a37f65eb\") " pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.801892 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c2e82f0-0742-4635-956c-e269a37f65eb-utilities\") pod \"community-operators-mf6fs\" (UID: \"3c2e82f0-0742-4635-956c-e269a37f65eb\") " pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.801991 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dsk8\" (UniqueName: \"kubernetes.io/projected/3c2e82f0-0742-4635-956c-e269a37f65eb-kube-api-access-2dsk8\") pod \"community-operators-mf6fs\" (UID: \"3c2e82f0-0742-4635-956c-e269a37f65eb\") " pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.904361 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dsk8\" (UniqueName: \"kubernetes.io/projected/3c2e82f0-0742-4635-956c-e269a37f65eb-kube-api-access-2dsk8\") pod \"community-operators-mf6fs\" (UID: \"3c2e82f0-0742-4635-956c-e269a37f65eb\") " pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.904515 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c2e82f0-0742-4635-956c-e269a37f65eb-catalog-content\") pod \"community-operators-mf6fs\" (UID: \"3c2e82f0-0742-4635-956c-e269a37f65eb\") " pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.904551 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c2e82f0-0742-4635-956c-e269a37f65eb-utilities\") pod \"community-operators-mf6fs\" (UID: \"3c2e82f0-0742-4635-956c-e269a37f65eb\") " pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.905027 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c2e82f0-0742-4635-956c-e269a37f65eb-catalog-content\") pod \"community-operators-mf6fs\" (UID: \"3c2e82f0-0742-4635-956c-e269a37f65eb\") " pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.905097 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c2e82f0-0742-4635-956c-e269a37f65eb-utilities\") pod \"community-operators-mf6fs\" (UID: \"3c2e82f0-0742-4635-956c-e269a37f65eb\") " pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:35:56 crc kubenswrapper[4592]: I0929 17:35:56.923927 4592 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2dsk8\" (UniqueName: \"kubernetes.io/projected/3c2e82f0-0742-4635-956c-e269a37f65eb-kube-api-access-2dsk8\") pod \"community-operators-mf6fs\" (UID: \"3c2e82f0-0742-4635-956c-e269a37f65eb\") " pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:35:57 crc kubenswrapper[4592]: I0929 17:35:57.061214 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:35:57 crc kubenswrapper[4592]: I0929 17:35:57.183424 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:35:57 crc kubenswrapper[4592]: E0929 17:35:57.183939 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:35:57 crc kubenswrapper[4592]: I0929 17:35:57.769091 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mf6fs"] Sep 29 17:35:57 crc kubenswrapper[4592]: I0929 17:35:57.886719 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mf6fs" event={"ID":"3c2e82f0-0742-4635-956c-e269a37f65eb","Type":"ContainerStarted","Data":"e7c0829dd086bf2c0cd1d14f63a8bde04e59fd2fea4515c5b3bdfe95aad997b3"} Sep 29 17:35:58 crc kubenswrapper[4592]: I0929 17:35:58.897132 4592 generic.go:334] "Generic (PLEG): container finished" podID="3c2e82f0-0742-4635-956c-e269a37f65eb" containerID="9a1d7e7668c3a551b8b0acffd71aaf29a5f60ec0e704af73cbb25e49313e85a3" exitCode=0 Sep 29 17:35:58 crc kubenswrapper[4592]: I0929 17:35:58.897257 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mf6fs" event={"ID":"3c2e82f0-0742-4635-956c-e269a37f65eb","Type":"ContainerDied","Data":"9a1d7e7668c3a551b8b0acffd71aaf29a5f60ec0e704af73cbb25e49313e85a3"} Sep 29 17:35:59 crc kubenswrapper[4592]: I0929 17:35:59.911016 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mf6fs" event={"ID":"3c2e82f0-0742-4635-956c-e269a37f65eb","Type":"ContainerStarted","Data":"98cc47c90160a78925cde05f3653e5e7bdf16b30620a43abfff8243cef1d5c49"} Sep 29 17:36:01 crc kubenswrapper[4592]: I0929 17:36:01.927640 4592 generic.go:334] "Generic (PLEG): container finished" podID="3c2e82f0-0742-4635-956c-e269a37f65eb" containerID="98cc47c90160a78925cde05f3653e5e7bdf16b30620a43abfff8243cef1d5c49" exitCode=0 Sep 29 17:36:01 crc kubenswrapper[4592]: I0929 17:36:01.927729 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mf6fs" event={"ID":"3c2e82f0-0742-4635-956c-e269a37f65eb","Type":"ContainerDied","Data":"98cc47c90160a78925cde05f3653e5e7bdf16b30620a43abfff8243cef1d5c49"} Sep 29 17:36:02 crc kubenswrapper[4592]: I0929 17:36:02.941971 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mf6fs" event={"ID":"3c2e82f0-0742-4635-956c-e269a37f65eb","Type":"ContainerStarted","Data":"89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c"} Sep 29 17:36:02 crc kubenswrapper[4592]: I0929 17:36:02.960581 4592 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mf6fs" podStartSLOduration=3.39772813 podStartE2EDuration="6.960565292s" podCreationTimestamp="2025-09-29 17:35:56 +0000 UTC" firstStartedPulling="2025-09-29 17:35:58.899537518 +0000 UTC m=+2689.047315199" lastFinishedPulling="2025-09-29 17:36:02.46237468 +0000 UTC m=+2692.610152361" observedRunningTime="2025-09-29 17:36:02.959213595 +0000 UTC m=+2693.106991296" watchObservedRunningTime="2025-09-29 17:36:02.960565292 +0000 UTC m=+2693.108342973" Sep 29 17:36:07 crc kubenswrapper[4592]: I0929 17:36:07.061869 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:36:07 crc kubenswrapper[4592]: I0929 17:36:07.063202 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:36:07 crc kubenswrapper[4592]: I0929 17:36:07.117492 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:36:08 crc kubenswrapper[4592]: I0929 17:36:08.046085 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:36:08 crc kubenswrapper[4592]: I0929 17:36:08.097223 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mf6fs"] Sep 29 17:36:08 crc kubenswrapper[4592]: I0929 17:36:08.183403 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:36:08 crc kubenswrapper[4592]: E0929 17:36:08.183674 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:36:10 crc kubenswrapper[4592]: I0929 17:36:10.019792 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mf6fs" podUID="3c2e82f0-0742-4635-956c-e269a37f65eb" containerName="registry-server" containerID="cri-o://89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c" gracePeriod=2 Sep 29 17:36:10 crc kubenswrapper[4592]: I0929 17:36:10.511633 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:36:10 crc kubenswrapper[4592]: I0929 17:36:10.609814 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c2e82f0-0742-4635-956c-e269a37f65eb-utilities\") pod \"3c2e82f0-0742-4635-956c-e269a37f65eb\" (UID: \"3c2e82f0-0742-4635-956c-e269a37f65eb\") " Sep 29 17:36:10 crc kubenswrapper[4592]: I0929 17:36:10.610043 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c2e82f0-0742-4635-956c-e269a37f65eb-catalog-content\") pod \"3c2e82f0-0742-4635-956c-e269a37f65eb\" (UID: \"3c2e82f0-0742-4635-956c-e269a37f65eb\") " Sep 29 17:36:10 crc kubenswrapper[4592]: I0929 17:36:10.610123 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dsk8\" (UniqueName: \"kubernetes.io/projected/3c2e82f0-0742-4635-956c-e269a37f65eb-kube-api-access-2dsk8\") pod \"3c2e82f0-0742-4635-956c-e269a37f65eb\" (UID: \"3c2e82f0-0742-4635-956c-e269a37f65eb\") " Sep 29 17:36:10 crc kubenswrapper[4592]: I0929 17:36:10.610648 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c2e82f0-0742-4635-956c-e269a37f65eb-utilities" (OuterVolumeSpecName: "utilities") pod "3c2e82f0-0742-4635-956c-e269a37f65eb" (UID: "3c2e82f0-0742-4635-956c-e269a37f65eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:36:10 crc kubenswrapper[4592]: I0929 17:36:10.611258 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c2e82f0-0742-4635-956c-e269a37f65eb-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:36:10 crc kubenswrapper[4592]: I0929 17:36:10.615995 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c2e82f0-0742-4635-956c-e269a37f65eb-kube-api-access-2dsk8" (OuterVolumeSpecName: "kube-api-access-2dsk8") pod "3c2e82f0-0742-4635-956c-e269a37f65eb" (UID: "3c2e82f0-0742-4635-956c-e269a37f65eb"). InnerVolumeSpecName "kube-api-access-2dsk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:36:10 crc kubenswrapper[4592]: I0929 17:36:10.660844 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c2e82f0-0742-4635-956c-e269a37f65eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3c2e82f0-0742-4635-956c-e269a37f65eb" (UID: "3c2e82f0-0742-4635-956c-e269a37f65eb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:36:10 crc kubenswrapper[4592]: I0929 17:36:10.713207 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c2e82f0-0742-4635-956c-e269a37f65eb-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:36:10 crc kubenswrapper[4592]: I0929 17:36:10.713242 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dsk8\" (UniqueName: \"kubernetes.io/projected/3c2e82f0-0742-4635-956c-e269a37f65eb-kube-api-access-2dsk8\") on node \"crc\" DevicePath \"\"" Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.032714 4592 generic.go:334] "Generic (PLEG): container finished" podID="3c2e82f0-0742-4635-956c-e269a37f65eb" containerID="89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c" exitCode=0 Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.032753 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mf6fs" event={"ID":"3c2e82f0-0742-4635-956c-e269a37f65eb","Type":"ContainerDied","Data":"89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c"} Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.032777 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mf6fs" event={"ID":"3c2e82f0-0742-4635-956c-e269a37f65eb","Type":"ContainerDied","Data":"e7c0829dd086bf2c0cd1d14f63a8bde04e59fd2fea4515c5b3bdfe95aad997b3"} Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.032793 4592 scope.go:117] "RemoveContainer" containerID="89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c" Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.032880 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mf6fs" Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.083928 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mf6fs"] Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.084498 4592 scope.go:117] "RemoveContainer" containerID="98cc47c90160a78925cde05f3653e5e7bdf16b30620a43abfff8243cef1d5c49" Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.111003 4592 scope.go:117] "RemoveContainer" containerID="9a1d7e7668c3a551b8b0acffd71aaf29a5f60ec0e704af73cbb25e49313e85a3" Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.117697 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mf6fs"] Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.164843 4592 scope.go:117] "RemoveContainer" containerID="89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c" Sep 29 17:36:11 crc kubenswrapper[4592]: E0929 17:36:11.165564 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c\": container with ID starting with 89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c not found: ID does not exist" containerID="89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c" Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.165597 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c"} err="failed to get container status \"89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c\": rpc error: code = NotFound desc = could not find container \"89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c\": container with ID starting with 89436884864c40d8499ba7429df1962b593b032628746412df564766bfc30a1c not found: ID does not exist" Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.165620 4592 scope.go:117] "RemoveContainer" containerID="98cc47c90160a78925cde05f3653e5e7bdf16b30620a43abfff8243cef1d5c49" Sep 29 17:36:11 crc kubenswrapper[4592]: E0929 17:36:11.165828 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98cc47c90160a78925cde05f3653e5e7bdf16b30620a43abfff8243cef1d5c49\": container with ID starting with 98cc47c90160a78925cde05f3653e5e7bdf16b30620a43abfff8243cef1d5c49 not found: ID does not exist" containerID="98cc47c90160a78925cde05f3653e5e7bdf16b30620a43abfff8243cef1d5c49" Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.165848 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98cc47c90160a78925cde05f3653e5e7bdf16b30620a43abfff8243cef1d5c49"} err="failed to get container status \"98cc47c90160a78925cde05f3653e5e7bdf16b30620a43abfff8243cef1d5c49\": rpc error: code = NotFound desc = could not find container \"98cc47c90160a78925cde05f3653e5e7bdf16b30620a43abfff8243cef1d5c49\": container with ID starting with 98cc47c90160a78925cde05f3653e5e7bdf16b30620a43abfff8243cef1d5c49 not found: ID does not exist" Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.165865 4592 scope.go:117] "RemoveContainer" containerID="9a1d7e7668c3a551b8b0acffd71aaf29a5f60ec0e704af73cbb25e49313e85a3" Sep 29 17:36:11 crc kubenswrapper[4592]: E0929 17:36:11.166046 4592 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9a1d7e7668c3a551b8b0acffd71aaf29a5f60ec0e704af73cbb25e49313e85a3\": container with ID starting with 9a1d7e7668c3a551b8b0acffd71aaf29a5f60ec0e704af73cbb25e49313e85a3 not found: ID does not exist" containerID="9a1d7e7668c3a551b8b0acffd71aaf29a5f60ec0e704af73cbb25e49313e85a3" Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.166066 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a1d7e7668c3a551b8b0acffd71aaf29a5f60ec0e704af73cbb25e49313e85a3"} err="failed to get container status \"9a1d7e7668c3a551b8b0acffd71aaf29a5f60ec0e704af73cbb25e49313e85a3\": rpc error: code = NotFound desc = could not find container \"9a1d7e7668c3a551b8b0acffd71aaf29a5f60ec0e704af73cbb25e49313e85a3\": container with ID starting with 9a1d7e7668c3a551b8b0acffd71aaf29a5f60ec0e704af73cbb25e49313e85a3 not found: ID does not exist" Sep 29 17:36:11 crc kubenswrapper[4592]: I0929 17:36:11.202028 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c2e82f0-0742-4635-956c-e269a37f65eb" path="/var/lib/kubelet/pods/3c2e82f0-0742-4635-956c-e269a37f65eb/volumes" Sep 29 17:36:23 crc kubenswrapper[4592]: I0929 17:36:23.184025 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:36:23 crc kubenswrapper[4592]: E0929 17:36:23.185132 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:36:34 crc kubenswrapper[4592]: I0929 17:36:34.183622 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:36:35 crc kubenswrapper[4592]: I0929 17:36:35.254021 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"07495eb7ef29693ea790ae318d8bd264e11aab5606abd43f7f82da970c653978"} Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.416973 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fjvh2"] Sep 29 17:36:40 crc kubenswrapper[4592]: E0929 17:36:40.418028 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c2e82f0-0742-4635-956c-e269a37f65eb" containerName="extract-utilities" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.418044 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c2e82f0-0742-4635-956c-e269a37f65eb" containerName="extract-utilities" Sep 29 17:36:40 crc kubenswrapper[4592]: E0929 17:36:40.418095 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c2e82f0-0742-4635-956c-e269a37f65eb" containerName="extract-content" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.418108 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c2e82f0-0742-4635-956c-e269a37f65eb" containerName="extract-content" Sep 29 17:36:40 crc kubenswrapper[4592]: E0929 17:36:40.418125 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c2e82f0-0742-4635-956c-e269a37f65eb" containerName="registry-server" 
Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.418133 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c2e82f0-0742-4635-956c-e269a37f65eb" containerName="registry-server" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.418450 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c2e82f0-0742-4635-956c-e269a37f65eb" containerName="registry-server" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.420703 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.430011 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fjvh2"] Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.515348 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/712098e8-611c-4e43-9542-c5f288cacb2c-catalog-content\") pod \"redhat-marketplace-fjvh2\" (UID: \"712098e8-611c-4e43-9542-c5f288cacb2c\") " pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.515410 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76v2h\" (UniqueName: \"kubernetes.io/projected/712098e8-611c-4e43-9542-c5f288cacb2c-kube-api-access-76v2h\") pod \"redhat-marketplace-fjvh2\" (UID: \"712098e8-611c-4e43-9542-c5f288cacb2c\") " pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.515465 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/712098e8-611c-4e43-9542-c5f288cacb2c-utilities\") pod \"redhat-marketplace-fjvh2\" (UID: \"712098e8-611c-4e43-9542-c5f288cacb2c\") " pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.617271 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/712098e8-611c-4e43-9542-c5f288cacb2c-utilities\") pod \"redhat-marketplace-fjvh2\" (UID: \"712098e8-611c-4e43-9542-c5f288cacb2c\") " pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.617517 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/712098e8-611c-4e43-9542-c5f288cacb2c-catalog-content\") pod \"redhat-marketplace-fjvh2\" (UID: \"712098e8-611c-4e43-9542-c5f288cacb2c\") " pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.617557 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76v2h\" (UniqueName: \"kubernetes.io/projected/712098e8-611c-4e43-9542-c5f288cacb2c-kube-api-access-76v2h\") pod \"redhat-marketplace-fjvh2\" (UID: \"712098e8-611c-4e43-9542-c5f288cacb2c\") " pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.617892 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/712098e8-611c-4e43-9542-c5f288cacb2c-utilities\") pod \"redhat-marketplace-fjvh2\" (UID: \"712098e8-611c-4e43-9542-c5f288cacb2c\") " 
pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.618252 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/712098e8-611c-4e43-9542-c5f288cacb2c-catalog-content\") pod \"redhat-marketplace-fjvh2\" (UID: \"712098e8-611c-4e43-9542-c5f288cacb2c\") " pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.638195 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76v2h\" (UniqueName: \"kubernetes.io/projected/712098e8-611c-4e43-9542-c5f288cacb2c-kube-api-access-76v2h\") pod \"redhat-marketplace-fjvh2\" (UID: \"712098e8-611c-4e43-9542-c5f288cacb2c\") " pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:40 crc kubenswrapper[4592]: I0929 17:36:40.755070 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:41 crc kubenswrapper[4592]: I0929 17:36:41.234544 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fjvh2"] Sep 29 17:36:41 crc kubenswrapper[4592]: I0929 17:36:41.329182 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjvh2" event={"ID":"712098e8-611c-4e43-9542-c5f288cacb2c","Type":"ContainerStarted","Data":"b7b69d0085649ab622a1a46196c1bd778f7d8a34ac02c83e15f7fa21f61e2eb7"} Sep 29 17:36:42 crc kubenswrapper[4592]: I0929 17:36:42.342424 4592 generic.go:334] "Generic (PLEG): container finished" podID="712098e8-611c-4e43-9542-c5f288cacb2c" containerID="38cc29e4020e5685655013e8085d6261ab743a4275d040895e840ac20dc303a3" exitCode=0 Sep 29 17:36:42 crc kubenswrapper[4592]: I0929 17:36:42.342840 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjvh2" event={"ID":"712098e8-611c-4e43-9542-c5f288cacb2c","Type":"ContainerDied","Data":"38cc29e4020e5685655013e8085d6261ab743a4275d040895e840ac20dc303a3"} Sep 29 17:36:44 crc kubenswrapper[4592]: I0929 17:36:44.366694 4592 generic.go:334] "Generic (PLEG): container finished" podID="712098e8-611c-4e43-9542-c5f288cacb2c" containerID="e2d845dfb603cb55a3ce7b7960054ce91559c4c335da934e1262694166666f71" exitCode=0 Sep 29 17:36:44 crc kubenswrapper[4592]: I0929 17:36:44.366837 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjvh2" event={"ID":"712098e8-611c-4e43-9542-c5f288cacb2c","Type":"ContainerDied","Data":"e2d845dfb603cb55a3ce7b7960054ce91559c4c335da934e1262694166666f71"} Sep 29 17:36:45 crc kubenswrapper[4592]: I0929 17:36:45.380606 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjvh2" event={"ID":"712098e8-611c-4e43-9542-c5f288cacb2c","Type":"ContainerStarted","Data":"d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0"} Sep 29 17:36:45 crc kubenswrapper[4592]: I0929 17:36:45.407731 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fjvh2" podStartSLOduration=2.982396429 podStartE2EDuration="5.40771164s" podCreationTimestamp="2025-09-29 17:36:40 +0000 UTC" firstStartedPulling="2025-09-29 17:36:42.344103147 +0000 UTC m=+2732.491880838" lastFinishedPulling="2025-09-29 17:36:44.769418348 +0000 UTC m=+2734.917196049" observedRunningTime="2025-09-29 17:36:45.399100165 +0000 UTC m=+2735.546877846" 
watchObservedRunningTime="2025-09-29 17:36:45.40771164 +0000 UTC m=+2735.555489321" Sep 29 17:36:50 crc kubenswrapper[4592]: I0929 17:36:50.756077 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:50 crc kubenswrapper[4592]: I0929 17:36:50.756668 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:50 crc kubenswrapper[4592]: I0929 17:36:50.821910 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:51 crc kubenswrapper[4592]: I0929 17:36:51.485930 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.670356 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-qc5wr"] Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.673605 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.697137 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qc5wr"] Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.797369 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2hzv\" (UniqueName: \"kubernetes.io/projected/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-kube-api-access-z2hzv\") pod \"redhat-operators-qc5wr\" (UID: \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\") " pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.797437 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-utilities\") pod \"redhat-operators-qc5wr\" (UID: \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\") " pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.797489 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-catalog-content\") pod \"redhat-operators-qc5wr\" (UID: \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\") " pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.900012 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2hzv\" (UniqueName: \"kubernetes.io/projected/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-kube-api-access-z2hzv\") pod \"redhat-operators-qc5wr\" (UID: \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\") " pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.900336 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-utilities\") pod \"redhat-operators-qc5wr\" (UID: \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\") " pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.900450 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-catalog-content\") pod \"redhat-operators-qc5wr\" (UID: \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\") " pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.901041 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-utilities\") pod \"redhat-operators-qc5wr\" (UID: \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\") " pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.901079 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-catalog-content\") pod \"redhat-operators-qc5wr\" (UID: \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\") " pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.923314 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2hzv\" (UniqueName: \"kubernetes.io/projected/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-kube-api-access-z2hzv\") pod \"redhat-operators-qc5wr\" (UID: \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\") " pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:36:54 crc kubenswrapper[4592]: I0929 17:36:54.999383 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:36:55 crc kubenswrapper[4592]: I0929 17:36:55.524848 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-qc5wr"] Sep 29 17:36:55 crc kubenswrapper[4592]: W0929 17:36:55.537468 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd53504b2_ba91_4098_bbe7_b9cdfb18d16e.slice/crio-537f46b665d3f6536f2335114ca86bb7aab2fdfb01ac8a90b9a677a8425f6a08 WatchSource:0}: Error finding container 537f46b665d3f6536f2335114ca86bb7aab2fdfb01ac8a90b9a677a8425f6a08: Status 404 returned error can't find the container with id 537f46b665d3f6536f2335114ca86bb7aab2fdfb01ac8a90b9a677a8425f6a08 Sep 29 17:36:56 crc kubenswrapper[4592]: I0929 17:36:56.475264 4592 generic.go:334] "Generic (PLEG): container finished" podID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" containerID="e73fbfcc02d7c11a7f7cd80e9fca9847d3f659359144b03d9489ef7b9d6cd416" exitCode=0 Sep 29 17:36:56 crc kubenswrapper[4592]: I0929 17:36:56.475547 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qc5wr" event={"ID":"d53504b2-ba91-4098-bbe7-b9cdfb18d16e","Type":"ContainerDied","Data":"e73fbfcc02d7c11a7f7cd80e9fca9847d3f659359144b03d9489ef7b9d6cd416"} Sep 29 17:36:56 crc kubenswrapper[4592]: I0929 17:36:56.475572 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qc5wr" event={"ID":"d53504b2-ba91-4098-bbe7-b9cdfb18d16e","Type":"ContainerStarted","Data":"537f46b665d3f6536f2335114ca86bb7aab2fdfb01ac8a90b9a677a8425f6a08"} Sep 29 17:36:56 crc kubenswrapper[4592]: I0929 17:36:56.477726 4592 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 17:36:58 crc kubenswrapper[4592]: I0929 17:36:58.496894 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qc5wr" 
event={"ID":"d53504b2-ba91-4098-bbe7-b9cdfb18d16e","Type":"ContainerStarted","Data":"70f050ffe57b35872f427665770f9a1fff7c78d2a12c9f3b40caa9681689f0fa"} Sep 29 17:36:59 crc kubenswrapper[4592]: I0929 17:36:59.662869 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fjvh2"] Sep 29 17:36:59 crc kubenswrapper[4592]: I0929 17:36:59.663865 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fjvh2" podUID="712098e8-611c-4e43-9542-c5f288cacb2c" containerName="registry-server" containerID="cri-o://d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0" gracePeriod=2 Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.158187 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.299908 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/712098e8-611c-4e43-9542-c5f288cacb2c-utilities\") pod \"712098e8-611c-4e43-9542-c5f288cacb2c\" (UID: \"712098e8-611c-4e43-9542-c5f288cacb2c\") " Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.300025 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/712098e8-611c-4e43-9542-c5f288cacb2c-catalog-content\") pod \"712098e8-611c-4e43-9542-c5f288cacb2c\" (UID: \"712098e8-611c-4e43-9542-c5f288cacb2c\") " Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.300188 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76v2h\" (UniqueName: \"kubernetes.io/projected/712098e8-611c-4e43-9542-c5f288cacb2c-kube-api-access-76v2h\") pod \"712098e8-611c-4e43-9542-c5f288cacb2c\" (UID: \"712098e8-611c-4e43-9542-c5f288cacb2c\") " Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.301141 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/712098e8-611c-4e43-9542-c5f288cacb2c-utilities" (OuterVolumeSpecName: "utilities") pod "712098e8-611c-4e43-9542-c5f288cacb2c" (UID: "712098e8-611c-4e43-9542-c5f288cacb2c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.306656 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/712098e8-611c-4e43-9542-c5f288cacb2c-kube-api-access-76v2h" (OuterVolumeSpecName: "kube-api-access-76v2h") pod "712098e8-611c-4e43-9542-c5f288cacb2c" (UID: "712098e8-611c-4e43-9542-c5f288cacb2c"). InnerVolumeSpecName "kube-api-access-76v2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.312896 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/712098e8-611c-4e43-9542-c5f288cacb2c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "712098e8-611c-4e43-9542-c5f288cacb2c" (UID: "712098e8-611c-4e43-9542-c5f288cacb2c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.402267 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76v2h\" (UniqueName: \"kubernetes.io/projected/712098e8-611c-4e43-9542-c5f288cacb2c-kube-api-access-76v2h\") on node \"crc\" DevicePath \"\"" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.402295 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/712098e8-611c-4e43-9542-c5f288cacb2c-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.402305 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/712098e8-611c-4e43-9542-c5f288cacb2c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.516495 4592 generic.go:334] "Generic (PLEG): container finished" podID="712098e8-611c-4e43-9542-c5f288cacb2c" containerID="d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0" exitCode=0 Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.516545 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjvh2" event={"ID":"712098e8-611c-4e43-9542-c5f288cacb2c","Type":"ContainerDied","Data":"d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0"} Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.516579 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fjvh2" event={"ID":"712098e8-611c-4e43-9542-c5f288cacb2c","Type":"ContainerDied","Data":"b7b69d0085649ab622a1a46196c1bd778f7d8a34ac02c83e15f7fa21f61e2eb7"} Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.516607 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fjvh2" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.516614 4592 scope.go:117] "RemoveContainer" containerID="d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.539136 4592 scope.go:117] "RemoveContainer" containerID="e2d845dfb603cb55a3ce7b7960054ce91559c4c335da934e1262694166666f71" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.555847 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fjvh2"] Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.566165 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fjvh2"] Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.589037 4592 scope.go:117] "RemoveContainer" containerID="38cc29e4020e5685655013e8085d6261ab743a4275d040895e840ac20dc303a3" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.612712 4592 scope.go:117] "RemoveContainer" containerID="d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0" Sep 29 17:37:00 crc kubenswrapper[4592]: E0929 17:37:00.613073 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0\": container with ID starting with d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0 not found: ID does not exist" containerID="d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.613101 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0"} err="failed to get container status \"d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0\": rpc error: code = NotFound desc = could not find container \"d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0\": container with ID starting with d738e6bfc4707573766c2ee630be82cc367336d3c8284df7df1731d39cae95d0 not found: ID does not exist" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.613123 4592 scope.go:117] "RemoveContainer" containerID="e2d845dfb603cb55a3ce7b7960054ce91559c4c335da934e1262694166666f71" Sep 29 17:37:00 crc kubenswrapper[4592]: E0929 17:37:00.613536 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2d845dfb603cb55a3ce7b7960054ce91559c4c335da934e1262694166666f71\": container with ID starting with e2d845dfb603cb55a3ce7b7960054ce91559c4c335da934e1262694166666f71 not found: ID does not exist" containerID="e2d845dfb603cb55a3ce7b7960054ce91559c4c335da934e1262694166666f71" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.613563 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2d845dfb603cb55a3ce7b7960054ce91559c4c335da934e1262694166666f71"} err="failed to get container status \"e2d845dfb603cb55a3ce7b7960054ce91559c4c335da934e1262694166666f71\": rpc error: code = NotFound desc = could not find container \"e2d845dfb603cb55a3ce7b7960054ce91559c4c335da934e1262694166666f71\": container with ID starting with e2d845dfb603cb55a3ce7b7960054ce91559c4c335da934e1262694166666f71 not found: ID does not exist" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.613577 4592 scope.go:117] "RemoveContainer" 
containerID="38cc29e4020e5685655013e8085d6261ab743a4275d040895e840ac20dc303a3" Sep 29 17:37:00 crc kubenswrapper[4592]: E0929 17:37:00.613950 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38cc29e4020e5685655013e8085d6261ab743a4275d040895e840ac20dc303a3\": container with ID starting with 38cc29e4020e5685655013e8085d6261ab743a4275d040895e840ac20dc303a3 not found: ID does not exist" containerID="38cc29e4020e5685655013e8085d6261ab743a4275d040895e840ac20dc303a3" Sep 29 17:37:00 crc kubenswrapper[4592]: I0929 17:37:00.613972 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38cc29e4020e5685655013e8085d6261ab743a4275d040895e840ac20dc303a3"} err="failed to get container status \"38cc29e4020e5685655013e8085d6261ab743a4275d040895e840ac20dc303a3\": rpc error: code = NotFound desc = could not find container \"38cc29e4020e5685655013e8085d6261ab743a4275d040895e840ac20dc303a3\": container with ID starting with 38cc29e4020e5685655013e8085d6261ab743a4275d040895e840ac20dc303a3 not found: ID does not exist" Sep 29 17:37:01 crc kubenswrapper[4592]: I0929 17:37:01.193610 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="712098e8-611c-4e43-9542-c5f288cacb2c" path="/var/lib/kubelet/pods/712098e8-611c-4e43-9542-c5f288cacb2c/volumes" Sep 29 17:37:01 crc kubenswrapper[4592]: I0929 17:37:01.529983 4592 generic.go:334] "Generic (PLEG): container finished" podID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" containerID="70f050ffe57b35872f427665770f9a1fff7c78d2a12c9f3b40caa9681689f0fa" exitCode=0 Sep 29 17:37:01 crc kubenswrapper[4592]: I0929 17:37:01.530026 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qc5wr" event={"ID":"d53504b2-ba91-4098-bbe7-b9cdfb18d16e","Type":"ContainerDied","Data":"70f050ffe57b35872f427665770f9a1fff7c78d2a12c9f3b40caa9681689f0fa"} Sep 29 17:37:02 crc kubenswrapper[4592]: I0929 17:37:02.540579 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qc5wr" event={"ID":"d53504b2-ba91-4098-bbe7-b9cdfb18d16e","Type":"ContainerStarted","Data":"0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793"} Sep 29 17:37:02 crc kubenswrapper[4592]: I0929 17:37:02.563995 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-qc5wr" podStartSLOduration=2.827135383 podStartE2EDuration="8.563980074s" podCreationTimestamp="2025-09-29 17:36:54 +0000 UTC" firstStartedPulling="2025-09-29 17:36:56.477414358 +0000 UTC m=+2746.625192039" lastFinishedPulling="2025-09-29 17:37:02.214259049 +0000 UTC m=+2752.362036730" observedRunningTime="2025-09-29 17:37:02.562604506 +0000 UTC m=+2752.710382187" watchObservedRunningTime="2025-09-29 17:37:02.563980074 +0000 UTC m=+2752.711757755" Sep 29 17:37:05 crc kubenswrapper[4592]: I0929 17:37:05.000232 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:37:05 crc kubenswrapper[4592]: I0929 17:37:05.000627 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:37:06 crc kubenswrapper[4592]: I0929 17:37:06.055781 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-qc5wr" podUID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" containerName="registry-server" 
probeResult="failure" output=< Sep 29 17:37:06 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s Sep 29 17:37:06 crc kubenswrapper[4592]: > Sep 29 17:37:15 crc kubenswrapper[4592]: I0929 17:37:15.048770 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:37:15 crc kubenswrapper[4592]: I0929 17:37:15.109595 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:37:15 crc kubenswrapper[4592]: I0929 17:37:15.285628 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qc5wr"] Sep 29 17:37:16 crc kubenswrapper[4592]: I0929 17:37:16.662442 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-qc5wr" podUID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" containerName="registry-server" containerID="cri-o://0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793" gracePeriod=2 Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.082986 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.120694 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-catalog-content\") pod \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\" (UID: \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\") " Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.205794 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d53504b2-ba91-4098-bbe7-b9cdfb18d16e" (UID: "d53504b2-ba91-4098-bbe7-b9cdfb18d16e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.222174 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-utilities\") pod \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\" (UID: \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\") " Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.222788 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2hzv\" (UniqueName: \"kubernetes.io/projected/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-kube-api-access-z2hzv\") pod \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\" (UID: \"d53504b2-ba91-4098-bbe7-b9cdfb18d16e\") " Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.222942 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-utilities" (OuterVolumeSpecName: "utilities") pod "d53504b2-ba91-4098-bbe7-b9cdfb18d16e" (UID: "d53504b2-ba91-4098-bbe7-b9cdfb18d16e"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.224021 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.224040 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.229332 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-kube-api-access-z2hzv" (OuterVolumeSpecName: "kube-api-access-z2hzv") pod "d53504b2-ba91-4098-bbe7-b9cdfb18d16e" (UID: "d53504b2-ba91-4098-bbe7-b9cdfb18d16e"). InnerVolumeSpecName "kube-api-access-z2hzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.324916 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2hzv\" (UniqueName: \"kubernetes.io/projected/d53504b2-ba91-4098-bbe7-b9cdfb18d16e-kube-api-access-z2hzv\") on node \"crc\" DevicePath \"\"" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.672885 4592 generic.go:334] "Generic (PLEG): container finished" podID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" containerID="0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793" exitCode=0 Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.672929 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qc5wr" event={"ID":"d53504b2-ba91-4098-bbe7-b9cdfb18d16e","Type":"ContainerDied","Data":"0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793"} Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.672954 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-qc5wr" event={"ID":"d53504b2-ba91-4098-bbe7-b9cdfb18d16e","Type":"ContainerDied","Data":"537f46b665d3f6536f2335114ca86bb7aab2fdfb01ac8a90b9a677a8425f6a08"} Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.672969 4592 scope.go:117] "RemoveContainer" containerID="0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.673090 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-qc5wr" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.707698 4592 scope.go:117] "RemoveContainer" containerID="70f050ffe57b35872f427665770f9a1fff7c78d2a12c9f3b40caa9681689f0fa" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.711276 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-qc5wr"] Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.725445 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-qc5wr"] Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.753722 4592 scope.go:117] "RemoveContainer" containerID="e73fbfcc02d7c11a7f7cd80e9fca9847d3f659359144b03d9489ef7b9d6cd416" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.815763 4592 scope.go:117] "RemoveContainer" containerID="0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793" Sep 29 17:37:17 crc kubenswrapper[4592]: E0929 17:37:17.816365 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793\": container with ID starting with 0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793 not found: ID does not exist" containerID="0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.816413 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793"} err="failed to get container status \"0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793\": rpc error: code = NotFound desc = could not find container \"0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793\": container with ID starting with 0e0029e17d3735c6217d9147829d3f7694db6b1dd8d302ff9a0ef9eede527793 not found: ID does not exist" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.816515 4592 scope.go:117] "RemoveContainer" containerID="70f050ffe57b35872f427665770f9a1fff7c78d2a12c9f3b40caa9681689f0fa" Sep 29 17:37:17 crc kubenswrapper[4592]: E0929 17:37:17.816765 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70f050ffe57b35872f427665770f9a1fff7c78d2a12c9f3b40caa9681689f0fa\": container with ID starting with 70f050ffe57b35872f427665770f9a1fff7c78d2a12c9f3b40caa9681689f0fa not found: ID does not exist" containerID="70f050ffe57b35872f427665770f9a1fff7c78d2a12c9f3b40caa9681689f0fa" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.816791 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70f050ffe57b35872f427665770f9a1fff7c78d2a12c9f3b40caa9681689f0fa"} err="failed to get container status \"70f050ffe57b35872f427665770f9a1fff7c78d2a12c9f3b40caa9681689f0fa\": rpc error: code = NotFound desc = could not find container \"70f050ffe57b35872f427665770f9a1fff7c78d2a12c9f3b40caa9681689f0fa\": container with ID starting with 70f050ffe57b35872f427665770f9a1fff7c78d2a12c9f3b40caa9681689f0fa not found: ID does not exist" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.816807 4592 scope.go:117] "RemoveContainer" containerID="e73fbfcc02d7c11a7f7cd80e9fca9847d3f659359144b03d9489ef7b9d6cd416" Sep 29 17:37:17 crc kubenswrapper[4592]: E0929 17:37:17.817022 4592 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"e73fbfcc02d7c11a7f7cd80e9fca9847d3f659359144b03d9489ef7b9d6cd416\": container with ID starting with e73fbfcc02d7c11a7f7cd80e9fca9847d3f659359144b03d9489ef7b9d6cd416 not found: ID does not exist" containerID="e73fbfcc02d7c11a7f7cd80e9fca9847d3f659359144b03d9489ef7b9d6cd416" Sep 29 17:37:17 crc kubenswrapper[4592]: I0929 17:37:17.817051 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e73fbfcc02d7c11a7f7cd80e9fca9847d3f659359144b03d9489ef7b9d6cd416"} err="failed to get container status \"e73fbfcc02d7c11a7f7cd80e9fca9847d3f659359144b03d9489ef7b9d6cd416\": rpc error: code = NotFound desc = could not find container \"e73fbfcc02d7c11a7f7cd80e9fca9847d3f659359144b03d9489ef7b9d6cd416\": container with ID starting with e73fbfcc02d7c11a7f7cd80e9fca9847d3f659359144b03d9489ef7b9d6cd416 not found: ID does not exist" Sep 29 17:37:19 crc kubenswrapper[4592]: I0929 17:37:19.195659 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" path="/var/lib/kubelet/pods/d53504b2-ba91-4098-bbe7-b9cdfb18d16e/volumes" Sep 29 17:38:39 crc kubenswrapper[4592]: I0929 17:38:39.521687 4592 generic.go:334] "Generic (PLEG): container finished" podID="d76cbdef-0253-4fd5-abc2-bec6b0b6df81" containerID="2b3c03f181cd54a9ebf7c7f4be39ed01885cdb152a6bdae7790a4a5dddbd1fd1" exitCode=0 Sep 29 17:38:39 crc kubenswrapper[4592]: I0929 17:38:39.521807 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" event={"ID":"d76cbdef-0253-4fd5-abc2-bec6b0b6df81","Type":"ContainerDied","Data":"2b3c03f181cd54a9ebf7c7f4be39ed01885cdb152a6bdae7790a4a5dddbd1fd1"} Sep 29 17:38:40 crc kubenswrapper[4592]: I0929 17:38:40.944264 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.072949 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ssh-key\") pod \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.073016 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-2\") pod \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.073314 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-telemetry-combined-ca-bundle\") pod \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.073519 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdr66\" (UniqueName: \"kubernetes.io/projected/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-kube-api-access-jdr66\") pod \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.073584 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-1\") pod \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.073626 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-0\") pod \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.073748 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-inventory\") pod \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\" (UID: \"d76cbdef-0253-4fd5-abc2-bec6b0b6df81\") " Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.079096 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-kube-api-access-jdr66" (OuterVolumeSpecName: "kube-api-access-jdr66") pod "d76cbdef-0253-4fd5-abc2-bec6b0b6df81" (UID: "d76cbdef-0253-4fd5-abc2-bec6b0b6df81"). InnerVolumeSpecName "kube-api-access-jdr66". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.079666 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "d76cbdef-0253-4fd5-abc2-bec6b0b6df81" (UID: "d76cbdef-0253-4fd5-abc2-bec6b0b6df81"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.104224 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-inventory" (OuterVolumeSpecName: "inventory") pod "d76cbdef-0253-4fd5-abc2-bec6b0b6df81" (UID: "d76cbdef-0253-4fd5-abc2-bec6b0b6df81"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.113626 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "d76cbdef-0253-4fd5-abc2-bec6b0b6df81" (UID: "d76cbdef-0253-4fd5-abc2-bec6b0b6df81"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.117180 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d76cbdef-0253-4fd5-abc2-bec6b0b6df81" (UID: "d76cbdef-0253-4fd5-abc2-bec6b0b6df81"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.120756 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "d76cbdef-0253-4fd5-abc2-bec6b0b6df81" (UID: "d76cbdef-0253-4fd5-abc2-bec6b0b6df81"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.127677 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "d76cbdef-0253-4fd5-abc2-bec6b0b6df81" (UID: "d76cbdef-0253-4fd5-abc2-bec6b0b6df81"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.175714 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdr66\" (UniqueName: \"kubernetes.io/projected/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-kube-api-access-jdr66\") on node \"crc\" DevicePath \"\"" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.176183 4592 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.179002 4592 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.179033 4592 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-inventory\") on node \"crc\" DevicePath \"\"" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.179044 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.179055 4592 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.179066 4592 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d76cbdef-0253-4fd5-abc2-bec6b0b6df81-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.544640 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" event={"ID":"d76cbdef-0253-4fd5-abc2-bec6b0b6df81","Type":"ContainerDied","Data":"eabd791d6ba9fe18eea7184e2daab4bde09fe4b5707fceb83fbd605ddfab7896"} Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.544680 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eabd791d6ba9fe18eea7184e2daab4bde09fe4b5707fceb83fbd605ddfab7896" Sep 29 17:38:41 crc kubenswrapper[4592]: I0929 17:38:41.544747 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h" Sep 29 17:39:00 crc kubenswrapper[4592]: I0929 17:39:00.884071 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:39:00 crc kubenswrapper[4592]: I0929 17:39:00.884659 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:39:30 crc kubenswrapper[4592]: I0929 17:39:30.884027 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:39:30 crc kubenswrapper[4592]: I0929 17:39:30.884672 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.422049 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 17:39:39 crc kubenswrapper[4592]: E0929 17:39:39.423484 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="712098e8-611c-4e43-9542-c5f288cacb2c" containerName="registry-server" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.423503 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="712098e8-611c-4e43-9542-c5f288cacb2c" containerName="registry-server" Sep 29 17:39:39 crc kubenswrapper[4592]: E0929 17:39:39.423512 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" containerName="extract-utilities" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.423520 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" containerName="extract-utilities" Sep 29 17:39:39 crc kubenswrapper[4592]: E0929 17:39:39.423554 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="712098e8-611c-4e43-9542-c5f288cacb2c" containerName="extract-content" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.423562 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="712098e8-611c-4e43-9542-c5f288cacb2c" containerName="extract-content" Sep 29 17:39:39 crc kubenswrapper[4592]: E0929 17:39:39.423583 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="712098e8-611c-4e43-9542-c5f288cacb2c" containerName="extract-utilities" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.423590 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="712098e8-611c-4e43-9542-c5f288cacb2c" containerName="extract-utilities" Sep 29 17:39:39 crc kubenswrapper[4592]: E0929 17:39:39.423599 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d76cbdef-0253-4fd5-abc2-bec6b0b6df81" 
containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.423609 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="d76cbdef-0253-4fd5-abc2-bec6b0b6df81" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 29 17:39:39 crc kubenswrapper[4592]: E0929 17:39:39.423617 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" containerName="registry-server" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.423624 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" containerName="registry-server" Sep 29 17:39:39 crc kubenswrapper[4592]: E0929 17:39:39.423647 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" containerName="extract-content" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.423655 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" containerName="extract-content" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.423908 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="d76cbdef-0253-4fd5-abc2-bec6b0b6df81" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.423939 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="712098e8-611c-4e43-9542-c5f288cacb2c" containerName="registry-server" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.423960 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="d53504b2-ba91-4098-bbe7-b9cdfb18d16e" containerName="registry-server" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.424645 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.426629 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-7lmcp" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.426827 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.426921 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.431687 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.437626 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.509413 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1f57b8e4-0399-410d-a4ae-14451f3832f2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.509457 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f57b8e4-0399-410d-a4ae-14451f3832f2-config-data\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.509572 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.611397 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/1f57b8e4-0399-410d-a4ae-14451f3832f2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.611438 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.611463 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.611549 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.611585 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1f57b8e4-0399-410d-a4ae-14451f3832f2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.611612 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f57b8e4-0399-410d-a4ae-14451f3832f2-config-data\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.611638 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/1f57b8e4-0399-410d-a4ae-14451f3832f2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.611750 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.611787 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v7fx\" (UniqueName: \"kubernetes.io/projected/1f57b8e4-0399-410d-a4ae-14451f3832f2-kube-api-access-7v7fx\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.613098 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f57b8e4-0399-410d-a4ae-14451f3832f2-config-data\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.613436 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1f57b8e4-0399-410d-a4ae-14451f3832f2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.620599 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.713406 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/1f57b8e4-0399-410d-a4ae-14451f3832f2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.713517 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.713568 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v7fx\" (UniqueName: \"kubernetes.io/projected/1f57b8e4-0399-410d-a4ae-14451f3832f2-kube-api-access-7v7fx\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.713616 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/1f57b8e4-0399-410d-a4ae-14451f3832f2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.713673 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.713777 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.713957 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.714192 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/1f57b8e4-0399-410d-a4ae-14451f3832f2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.714809 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/1f57b8e4-0399-410d-a4ae-14451f3832f2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.718241 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-ca-certs\") pod \"tempest-tests-tempest\" (UID: 
\"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.719973 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.730142 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v7fx\" (UniqueName: \"kubernetes.io/projected/1f57b8e4-0399-410d-a4ae-14451f3832f2-kube-api-access-7v7fx\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.742727 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"tempest-tests-tempest\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " pod="openstack/tempest-tests-tempest" Sep 29 17:39:39 crc kubenswrapper[4592]: I0929 17:39:39.749010 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 17:39:40 crc kubenswrapper[4592]: I0929 17:39:40.205647 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Sep 29 17:39:40 crc kubenswrapper[4592]: W0929 17:39:40.232387 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f57b8e4_0399_410d_a4ae_14451f3832f2.slice/crio-b0859cfcc904e418b0343d358ce0eb59914f1a1a8dc909b5cc5025e88da6e745 WatchSource:0}: Error finding container b0859cfcc904e418b0343d358ce0eb59914f1a1a8dc909b5cc5025e88da6e745: Status 404 returned error can't find the container with id b0859cfcc904e418b0343d358ce0eb59914f1a1a8dc909b5cc5025e88da6e745 Sep 29 17:39:41 crc kubenswrapper[4592]: I0929 17:39:41.115884 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"1f57b8e4-0399-410d-a4ae-14451f3832f2","Type":"ContainerStarted","Data":"b0859cfcc904e418b0343d358ce0eb59914f1a1a8dc909b5cc5025e88da6e745"} Sep 29 17:40:00 crc kubenswrapper[4592]: I0929 17:40:00.882886 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:40:00 crc kubenswrapper[4592]: I0929 17:40:00.883433 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:40:00 crc kubenswrapper[4592]: I0929 17:40:00.883477 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 17:40:00 crc kubenswrapper[4592]: I0929 17:40:00.884234 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"07495eb7ef29693ea790ae318d8bd264e11aab5606abd43f7f82da970c653978"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 17:40:00 crc kubenswrapper[4592]: I0929 17:40:00.884298 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://07495eb7ef29693ea790ae318d8bd264e11aab5606abd43f7f82da970c653978" gracePeriod=600 Sep 29 17:40:02 crc kubenswrapper[4592]: I0929 17:40:02.337020 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="07495eb7ef29693ea790ae318d8bd264e11aab5606abd43f7f82da970c653978" exitCode=0 Sep 29 17:40:02 crc kubenswrapper[4592]: I0929 17:40:02.337093 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"07495eb7ef29693ea790ae318d8bd264e11aab5606abd43f7f82da970c653978"} Sep 29 17:40:02 crc kubenswrapper[4592]: I0929 17:40:02.338830 4592 scope.go:117] "RemoveContainer" containerID="b16a134874873a35765af6de7e1fde6811a273676b1e73135a716bf48b4ee636" Sep 29 17:40:10 crc kubenswrapper[4592]: E0929 17:40:10.272920 4592 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Sep 29 17:40:10 crc kubenswrapper[4592]: E0929 17:40:10.277925 4592 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7v7fx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(1f57b8e4-0399-410d-a4ae-14451f3832f2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Sep 29 17:40:10 crc kubenswrapper[4592]: E0929 17:40:10.279490 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="1f57b8e4-0399-410d-a4ae-14451f3832f2" Sep 29 17:40:10 crc kubenswrapper[4592]: I0929 17:40:10.413224 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8"} Sep 29 17:40:10 crc kubenswrapper[4592]: E0929 17:40:10.414447 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="1f57b8e4-0399-410d-a4ae-14451f3832f2" Sep 29 17:40:24 crc kubenswrapper[4592]: I0929 17:40:24.788707 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Sep 29 17:40:26 crc kubenswrapper[4592]: I0929 17:40:26.584605 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"1f57b8e4-0399-410d-a4ae-14451f3832f2","Type":"ContainerStarted","Data":"c64299989dae8a9bb6bc33c8aba8da844f6579c839f1d9047d3809be45c0ca25"} Sep 29 17:40:26 crc kubenswrapper[4592]: I0929 17:40:26.613214 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.06693302 podStartE2EDuration="48.613187651s" podCreationTimestamp="2025-09-29 17:39:38 +0000 UTC" firstStartedPulling="2025-09-29 17:39:40.239449232 +0000 UTC m=+2910.387226913" lastFinishedPulling="2025-09-29 17:40:24.785703873 +0000 UTC m=+2954.933481544" observedRunningTime="2025-09-29 17:40:26.60324859 +0000 UTC m=+2956.751026271" watchObservedRunningTime="2025-09-29 17:40:26.613187651 +0000 UTC m=+2956.760965352" Sep 29 17:42:30 crc kubenswrapper[4592]: I0929 17:42:30.883863 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:42:30 crc kubenswrapper[4592]: I0929 17:42:30.884519 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:43:00 crc kubenswrapper[4592]: I0929 17:43:00.883803 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:43:00 crc kubenswrapper[4592]: I0929 17:43:00.884369 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.482064 4592 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-7mmqb"] Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.487033 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.512555 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7mmqb"] Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.608878 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-utilities\") pod \"certified-operators-7mmqb\" (UID: \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\") " pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.609103 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc5c8\" (UniqueName: \"kubernetes.io/projected/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-kube-api-access-jc5c8\") pod \"certified-operators-7mmqb\" (UID: \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\") " pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.609166 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-catalog-content\") pod \"certified-operators-7mmqb\" (UID: \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\") " pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.711497 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-utilities\") pod \"certified-operators-7mmqb\" (UID: \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\") " pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.712018 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-utilities\") pod \"certified-operators-7mmqb\" (UID: \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\") " pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.712210 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc5c8\" (UniqueName: \"kubernetes.io/projected/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-kube-api-access-jc5c8\") pod \"certified-operators-7mmqb\" (UID: \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\") " pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.712276 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-catalog-content\") pod \"certified-operators-7mmqb\" (UID: \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\") " pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.712813 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-catalog-content\") pod \"certified-operators-7mmqb\" (UID: 
\"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\") " pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.731495 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc5c8\" (UniqueName: \"kubernetes.io/projected/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-kube-api-access-jc5c8\") pod \"certified-operators-7mmqb\" (UID: \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\") " pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:18 crc kubenswrapper[4592]: I0929 17:43:18.809834 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:19 crc kubenswrapper[4592]: I0929 17:43:19.592251 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7mmqb"] Sep 29 17:43:20 crc kubenswrapper[4592]: I0929 17:43:20.237598 4592 generic.go:334] "Generic (PLEG): container finished" podID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" containerID="d97b123505e61c49a665a8e5c54e15ecbce7ae754b6ebd4e94dc037ab9f002cc" exitCode=0 Sep 29 17:43:20 crc kubenswrapper[4592]: I0929 17:43:20.237645 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mmqb" event={"ID":"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea","Type":"ContainerDied","Data":"d97b123505e61c49a665a8e5c54e15ecbce7ae754b6ebd4e94dc037ab9f002cc"} Sep 29 17:43:20 crc kubenswrapper[4592]: I0929 17:43:20.237888 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mmqb" event={"ID":"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea","Type":"ContainerStarted","Data":"66515795fe49c55e25c68e7b9100ce11df5ec0010c15d2f68b0a2818c54e1c1c"} Sep 29 17:43:20 crc kubenswrapper[4592]: I0929 17:43:20.239569 4592 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Sep 29 17:43:22 crc kubenswrapper[4592]: I0929 17:43:22.257402 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mmqb" event={"ID":"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea","Type":"ContainerStarted","Data":"81924d9f19fa379ba1887cbb3cc1dae9df919df784ede1bf80021a470e16d161"} Sep 29 17:43:23 crc kubenswrapper[4592]: I0929 17:43:23.268208 4592 generic.go:334] "Generic (PLEG): container finished" podID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" containerID="81924d9f19fa379ba1887cbb3cc1dae9df919df784ede1bf80021a470e16d161" exitCode=0 Sep 29 17:43:23 crc kubenswrapper[4592]: I0929 17:43:23.268501 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mmqb" event={"ID":"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea","Type":"ContainerDied","Data":"81924d9f19fa379ba1887cbb3cc1dae9df919df784ede1bf80021a470e16d161"} Sep 29 17:43:24 crc kubenswrapper[4592]: I0929 17:43:24.279947 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mmqb" event={"ID":"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea","Type":"ContainerStarted","Data":"1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572"} Sep 29 17:43:24 crc kubenswrapper[4592]: I0929 17:43:24.303508 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7mmqb" podStartSLOduration=2.8720455830000002 podStartE2EDuration="6.303484283s" podCreationTimestamp="2025-09-29 17:43:18 +0000 UTC" firstStartedPulling="2025-09-29 17:43:20.23932423 +0000 UTC m=+3130.387101911" 
lastFinishedPulling="2025-09-29 17:43:23.67076293 +0000 UTC m=+3133.818540611" observedRunningTime="2025-09-29 17:43:24.296746929 +0000 UTC m=+3134.444524610" watchObservedRunningTime="2025-09-29 17:43:24.303484283 +0000 UTC m=+3134.451261964" Sep 29 17:43:28 crc kubenswrapper[4592]: I0929 17:43:28.811437 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:28 crc kubenswrapper[4592]: I0929 17:43:28.811789 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:29 crc kubenswrapper[4592]: I0929 17:43:29.860978 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-7mmqb" podUID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" containerName="registry-server" probeResult="failure" output=< Sep 29 17:43:29 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s Sep 29 17:43:29 crc kubenswrapper[4592]: > Sep 29 17:43:30 crc kubenswrapper[4592]: I0929 17:43:30.883286 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 17:43:30 crc kubenswrapper[4592]: I0929 17:43:30.883923 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 17:43:30 crc kubenswrapper[4592]: I0929 17:43:30.884052 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 17:43:30 crc kubenswrapper[4592]: I0929 17:43:30.884813 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 17:43:30 crc kubenswrapper[4592]: I0929 17:43:30.884976 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" gracePeriod=600 Sep 29 17:43:31 crc kubenswrapper[4592]: E0929 17:43:31.011423 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:43:31 crc kubenswrapper[4592]: I0929 17:43:31.332976 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" 
containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" exitCode=0 Sep 29 17:43:31 crc kubenswrapper[4592]: I0929 17:43:31.333015 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8"} Sep 29 17:43:31 crc kubenswrapper[4592]: I0929 17:43:31.333052 4592 scope.go:117] "RemoveContainer" containerID="07495eb7ef29693ea790ae318d8bd264e11aab5606abd43f7f82da970c653978" Sep 29 17:43:31 crc kubenswrapper[4592]: I0929 17:43:31.333556 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:43:31 crc kubenswrapper[4592]: E0929 17:43:31.333776 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:43:38 crc kubenswrapper[4592]: I0929 17:43:38.868775 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:38 crc kubenswrapper[4592]: I0929 17:43:38.923659 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:39 crc kubenswrapper[4592]: I0929 17:43:39.112570 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7mmqb"] Sep 29 17:43:40 crc kubenswrapper[4592]: I0929 17:43:40.409745 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7mmqb" podUID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" containerName="registry-server" containerID="cri-o://1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572" gracePeriod=2 Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.367073 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.420115 4592 generic.go:334] "Generic (PLEG): container finished" podID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" containerID="1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572" exitCode=0 Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.420174 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mmqb" event={"ID":"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea","Type":"ContainerDied","Data":"1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572"} Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.420198 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7mmqb" event={"ID":"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea","Type":"ContainerDied","Data":"66515795fe49c55e25c68e7b9100ce11df5ec0010c15d2f68b0a2818c54e1c1c"} Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.420215 4592 scope.go:117] "RemoveContainer" containerID="1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.420326 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7mmqb" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.454356 4592 scope.go:117] "RemoveContainer" containerID="81924d9f19fa379ba1887cbb3cc1dae9df919df784ede1bf80021a470e16d161" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.497086 4592 scope.go:117] "RemoveContainer" containerID="d97b123505e61c49a665a8e5c54e15ecbce7ae754b6ebd4e94dc037ab9f002cc" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.505110 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-catalog-content\") pod \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\" (UID: \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\") " Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.505226 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc5c8\" (UniqueName: \"kubernetes.io/projected/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-kube-api-access-jc5c8\") pod \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\" (UID: \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\") " Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.505282 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-utilities\") pod \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\" (UID: \"e20fde8d-4ba0-44f1-8f69-33d1c075d4ea\") " Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.506590 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-utilities" (OuterVolumeSpecName: "utilities") pod "e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" (UID: "e20fde8d-4ba0-44f1-8f69-33d1c075d4ea"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.509005 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.512434 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-kube-api-access-jc5c8" (OuterVolumeSpecName: "kube-api-access-jc5c8") pod "e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" (UID: "e20fde8d-4ba0-44f1-8f69-33d1c075d4ea"). InnerVolumeSpecName "kube-api-access-jc5c8". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.529272 4592 scope.go:117] "RemoveContainer" containerID="1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572" Sep 29 17:43:41 crc kubenswrapper[4592]: E0929 17:43:41.530470 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572\": container with ID starting with 1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572 not found: ID does not exist" containerID="1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.530514 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572"} err="failed to get container status \"1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572\": rpc error: code = NotFound desc = could not find container \"1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572\": container with ID starting with 1492fc3911ba0673084ee13d220c7d137a3e807bb42191e1bbebab99aa0a1572 not found: ID does not exist" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.530552 4592 scope.go:117] "RemoveContainer" containerID="81924d9f19fa379ba1887cbb3cc1dae9df919df784ede1bf80021a470e16d161" Sep 29 17:43:41 crc kubenswrapper[4592]: E0929 17:43:41.531178 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81924d9f19fa379ba1887cbb3cc1dae9df919df784ede1bf80021a470e16d161\": container with ID starting with 81924d9f19fa379ba1887cbb3cc1dae9df919df784ede1bf80021a470e16d161 not found: ID does not exist" containerID="81924d9f19fa379ba1887cbb3cc1dae9df919df784ede1bf80021a470e16d161" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.531297 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81924d9f19fa379ba1887cbb3cc1dae9df919df784ede1bf80021a470e16d161"} err="failed to get container status \"81924d9f19fa379ba1887cbb3cc1dae9df919df784ede1bf80021a470e16d161\": rpc error: code = NotFound desc = could not find container \"81924d9f19fa379ba1887cbb3cc1dae9df919df784ede1bf80021a470e16d161\": container with ID starting with 81924d9f19fa379ba1887cbb3cc1dae9df919df784ede1bf80021a470e16d161 not found: ID does not exist" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.531314 4592 scope.go:117] "RemoveContainer" containerID="d97b123505e61c49a665a8e5c54e15ecbce7ae754b6ebd4e94dc037ab9f002cc" Sep 29 17:43:41 crc kubenswrapper[4592]: E0929 17:43:41.532056 4592 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"d97b123505e61c49a665a8e5c54e15ecbce7ae754b6ebd4e94dc037ab9f002cc\": container with ID starting with d97b123505e61c49a665a8e5c54e15ecbce7ae754b6ebd4e94dc037ab9f002cc not found: ID does not exist" containerID="d97b123505e61c49a665a8e5c54e15ecbce7ae754b6ebd4e94dc037ab9f002cc" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.532089 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d97b123505e61c49a665a8e5c54e15ecbce7ae754b6ebd4e94dc037ab9f002cc"} err="failed to get container status \"d97b123505e61c49a665a8e5c54e15ecbce7ae754b6ebd4e94dc037ab9f002cc\": rpc error: code = NotFound desc = could not find container \"d97b123505e61c49a665a8e5c54e15ecbce7ae754b6ebd4e94dc037ab9f002cc\": container with ID starting with d97b123505e61c49a665a8e5c54e15ecbce7ae754b6ebd4e94dc037ab9f002cc not found: ID does not exist" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.565696 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" (UID: "e20fde8d-4ba0-44f1-8f69-33d1c075d4ea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.611248 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.611287 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc5c8\" (UniqueName: \"kubernetes.io/projected/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea-kube-api-access-jc5c8\") on node \"crc\" DevicePath \"\"" Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.753238 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7mmqb"] Sep 29 17:43:41 crc kubenswrapper[4592]: I0929 17:43:41.763280 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7mmqb"] Sep 29 17:43:43 crc kubenswrapper[4592]: I0929 17:43:43.183573 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:43:43 crc kubenswrapper[4592]: E0929 17:43:43.184726 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:43:43 crc kubenswrapper[4592]: I0929 17:43:43.196868 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" path="/var/lib/kubelet/pods/e20fde8d-4ba0-44f1-8f69-33d1c075d4ea/volumes" Sep 29 17:43:54 crc kubenswrapper[4592]: I0929 17:43:54.183128 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:43:54 crc kubenswrapper[4592]: E0929 17:43:54.183964 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:44:05 crc kubenswrapper[4592]: I0929 17:44:05.184396 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:44:05 crc kubenswrapper[4592]: E0929 17:44:05.185518 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:44:17 crc kubenswrapper[4592]: I0929 17:44:17.186068 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:44:17 crc kubenswrapper[4592]: E0929 17:44:17.186946 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:44:28 crc kubenswrapper[4592]: I0929 17:44:28.183037 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:44:28 crc kubenswrapper[4592]: E0929 17:44:28.183848 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:44:43 crc kubenswrapper[4592]: I0929 17:44:43.183663 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:44:43 crc kubenswrapper[4592]: E0929 17:44:43.184451 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:44:54 crc kubenswrapper[4592]: I0929 17:44:54.183773 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:44:54 crc kubenswrapper[4592]: E0929 17:44:54.184664 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.191293 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p"] Sep 29 17:45:00 crc kubenswrapper[4592]: E0929 17:45:00.192487 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" containerName="extract-utilities" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.192505 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" containerName="extract-utilities" Sep 29 17:45:00 crc kubenswrapper[4592]: E0929 17:45:00.192521 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" containerName="extract-content" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.192529 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" containerName="extract-content" Sep 29 17:45:00 crc kubenswrapper[4592]: E0929 17:45:00.192560 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" containerName="registry-server" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.192568 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" containerName="registry-server" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.192802 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="e20fde8d-4ba0-44f1-8f69-33d1c075d4ea" containerName="registry-server" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.193561 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.201308 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.202078 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.252213 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p"] Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.326542 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65kx9\" (UniqueName: \"kubernetes.io/projected/c85afba1-7451-4d25-ada2-8d70b325ebf6-kube-api-access-65kx9\") pod \"collect-profiles-29319465-5674p\" (UID: \"c85afba1-7451-4d25-ada2-8d70b325ebf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.326608 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c85afba1-7451-4d25-ada2-8d70b325ebf6-config-volume\") pod \"collect-profiles-29319465-5674p\" (UID: \"c85afba1-7451-4d25-ada2-8d70b325ebf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.326634 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c85afba1-7451-4d25-ada2-8d70b325ebf6-secret-volume\") pod \"collect-profiles-29319465-5674p\" (UID: \"c85afba1-7451-4d25-ada2-8d70b325ebf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.428548 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65kx9\" (UniqueName: \"kubernetes.io/projected/c85afba1-7451-4d25-ada2-8d70b325ebf6-kube-api-access-65kx9\") pod \"collect-profiles-29319465-5674p\" (UID: \"c85afba1-7451-4d25-ada2-8d70b325ebf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.428828 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c85afba1-7451-4d25-ada2-8d70b325ebf6-config-volume\") pod \"collect-profiles-29319465-5674p\" (UID: \"c85afba1-7451-4d25-ada2-8d70b325ebf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.428915 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c85afba1-7451-4d25-ada2-8d70b325ebf6-secret-volume\") pod \"collect-profiles-29319465-5674p\" (UID: \"c85afba1-7451-4d25-ada2-8d70b325ebf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.429902 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c85afba1-7451-4d25-ada2-8d70b325ebf6-config-volume\") pod 
\"collect-profiles-29319465-5674p\" (UID: \"c85afba1-7451-4d25-ada2-8d70b325ebf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.437197 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c85afba1-7451-4d25-ada2-8d70b325ebf6-secret-volume\") pod \"collect-profiles-29319465-5674p\" (UID: \"c85afba1-7451-4d25-ada2-8d70b325ebf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.451939 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65kx9\" (UniqueName: \"kubernetes.io/projected/c85afba1-7451-4d25-ada2-8d70b325ebf6-kube-api-access-65kx9\") pod \"collect-profiles-29319465-5674p\" (UID: \"c85afba1-7451-4d25-ada2-8d70b325ebf6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:00 crc kubenswrapper[4592]: I0929 17:45:00.545558 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:01 crc kubenswrapper[4592]: I0929 17:45:01.212556 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p"] Sep 29 17:45:01 crc kubenswrapper[4592]: E0929 17:45:01.931351 4592 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc85afba1_7451_4d25_ada2_8d70b325ebf6.slice/crio-122281d4926e226bbf799f35db3a53ef07930ffcf47896af1fc3c0fb6235dcc3.scope\": RecentStats: unable to find data in memory cache]" Sep 29 17:45:02 crc kubenswrapper[4592]: I0929 17:45:02.083046 4592 generic.go:334] "Generic (PLEG): container finished" podID="c85afba1-7451-4d25-ada2-8d70b325ebf6" containerID="122281d4926e226bbf799f35db3a53ef07930ffcf47896af1fc3c0fb6235dcc3" exitCode=0 Sep 29 17:45:02 crc kubenswrapper[4592]: I0929 17:45:02.083094 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" event={"ID":"c85afba1-7451-4d25-ada2-8d70b325ebf6","Type":"ContainerDied","Data":"122281d4926e226bbf799f35db3a53ef07930ffcf47896af1fc3c0fb6235dcc3"} Sep 29 17:45:02 crc kubenswrapper[4592]: I0929 17:45:02.083125 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" event={"ID":"c85afba1-7451-4d25-ada2-8d70b325ebf6","Type":"ContainerStarted","Data":"707e2b01a4845c94848872109a3ca167bdd12192329ee94930d589e51ee1aeb0"} Sep 29 17:45:03 crc kubenswrapper[4592]: I0929 17:45:03.595994 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:03 crc kubenswrapper[4592]: I0929 17:45:03.695993 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c85afba1-7451-4d25-ada2-8d70b325ebf6-config-volume\") pod \"c85afba1-7451-4d25-ada2-8d70b325ebf6\" (UID: \"c85afba1-7451-4d25-ada2-8d70b325ebf6\") " Sep 29 17:45:03 crc kubenswrapper[4592]: I0929 17:45:03.696294 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65kx9\" (UniqueName: \"kubernetes.io/projected/c85afba1-7451-4d25-ada2-8d70b325ebf6-kube-api-access-65kx9\") pod \"c85afba1-7451-4d25-ada2-8d70b325ebf6\" (UID: \"c85afba1-7451-4d25-ada2-8d70b325ebf6\") " Sep 29 17:45:03 crc kubenswrapper[4592]: I0929 17:45:03.696361 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c85afba1-7451-4d25-ada2-8d70b325ebf6-secret-volume\") pod \"c85afba1-7451-4d25-ada2-8d70b325ebf6\" (UID: \"c85afba1-7451-4d25-ada2-8d70b325ebf6\") " Sep 29 17:45:03 crc kubenswrapper[4592]: I0929 17:45:03.696622 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c85afba1-7451-4d25-ada2-8d70b325ebf6-config-volume" (OuterVolumeSpecName: "config-volume") pod "c85afba1-7451-4d25-ada2-8d70b325ebf6" (UID: "c85afba1-7451-4d25-ada2-8d70b325ebf6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 17:45:03 crc kubenswrapper[4592]: I0929 17:45:03.696945 4592 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c85afba1-7451-4d25-ada2-8d70b325ebf6-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 17:45:03 crc kubenswrapper[4592]: I0929 17:45:03.704448 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c85afba1-7451-4d25-ada2-8d70b325ebf6-kube-api-access-65kx9" (OuterVolumeSpecName: "kube-api-access-65kx9") pod "c85afba1-7451-4d25-ada2-8d70b325ebf6" (UID: "c85afba1-7451-4d25-ada2-8d70b325ebf6"). InnerVolumeSpecName "kube-api-access-65kx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:45:03 crc kubenswrapper[4592]: I0929 17:45:03.705358 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c85afba1-7451-4d25-ada2-8d70b325ebf6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c85afba1-7451-4d25-ada2-8d70b325ebf6" (UID: "c85afba1-7451-4d25-ada2-8d70b325ebf6"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 17:45:03 crc kubenswrapper[4592]: I0929 17:45:03.798511 4592 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c85afba1-7451-4d25-ada2-8d70b325ebf6-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 17:45:03 crc kubenswrapper[4592]: I0929 17:45:03.798551 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65kx9\" (UniqueName: \"kubernetes.io/projected/c85afba1-7451-4d25-ada2-8d70b325ebf6-kube-api-access-65kx9\") on node \"crc\" DevicePath \"\"" Sep 29 17:45:04 crc kubenswrapper[4592]: I0929 17:45:04.112182 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" event={"ID":"c85afba1-7451-4d25-ada2-8d70b325ebf6","Type":"ContainerDied","Data":"707e2b01a4845c94848872109a3ca167bdd12192329ee94930d589e51ee1aeb0"} Sep 29 17:45:04 crc kubenswrapper[4592]: I0929 17:45:04.112260 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="707e2b01a4845c94848872109a3ca167bdd12192329ee94930d589e51ee1aeb0" Sep 29 17:45:04 crc kubenswrapper[4592]: I0929 17:45:04.112344 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319465-5674p" Sep 29 17:45:04 crc kubenswrapper[4592]: I0929 17:45:04.689615 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76"] Sep 29 17:45:04 crc kubenswrapper[4592]: I0929 17:45:04.697522 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319420-79l76"] Sep 29 17:45:05 crc kubenswrapper[4592]: I0929 17:45:05.195347 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33250f9a-7065-4cae-a125-33fe961c1ef4" path="/var/lib/kubelet/pods/33250f9a-7065-4cae-a125-33fe961c1ef4/volumes" Sep 29 17:45:08 crc kubenswrapper[4592]: I0929 17:45:08.184269 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:45:08 crc kubenswrapper[4592]: E0929 17:45:08.184780 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:45:19 crc kubenswrapper[4592]: I0929 17:45:19.183096 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:45:19 crc kubenswrapper[4592]: E0929 17:45:19.185030 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:45:22 crc kubenswrapper[4592]: I0929 17:45:22.517029 4592 scope.go:117] "RemoveContainer" containerID="b08b033c873a22417dff1442f96911218963743d32b7fdaeb51aba404769677d" Sep 29 17:45:31 
crc kubenswrapper[4592]: I0929 17:45:31.193694 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:45:31 crc kubenswrapper[4592]: E0929 17:45:31.194800 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:45:43 crc kubenswrapper[4592]: I0929 17:45:43.183685 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:45:43 crc kubenswrapper[4592]: E0929 17:45:43.184437 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:45:56 crc kubenswrapper[4592]: I0929 17:45:56.183123 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:45:56 crc kubenswrapper[4592]: E0929 17:45:56.184989 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:46:11 crc kubenswrapper[4592]: I0929 17:46:11.193819 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:46:11 crc kubenswrapper[4592]: E0929 17:46:11.195857 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:46:25 crc kubenswrapper[4592]: I0929 17:46:25.183549 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:46:25 crc kubenswrapper[4592]: E0929 17:46:25.184457 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:46:36 crc kubenswrapper[4592]: I0929 17:46:36.182915 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:46:36 crc 
kubenswrapper[4592]: E0929 17:46:36.183770 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.704109 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fhcjb"] Sep 29 17:46:45 crc kubenswrapper[4592]: E0929 17:46:45.705101 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c85afba1-7451-4d25-ada2-8d70b325ebf6" containerName="collect-profiles" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.705117 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="c85afba1-7451-4d25-ada2-8d70b325ebf6" containerName="collect-profiles" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.705415 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="c85afba1-7451-4d25-ada2-8d70b325ebf6" containerName="collect-profiles" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.707063 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.725945 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhcjb"] Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.791363 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnctw\" (UniqueName: \"kubernetes.io/projected/781ea677-36c7-4740-afdf-fb081484cf0c-kube-api-access-gnctw\") pod \"redhat-marketplace-fhcjb\" (UID: \"781ea677-36c7-4740-afdf-fb081484cf0c\") " pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.791436 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/781ea677-36c7-4740-afdf-fb081484cf0c-catalog-content\") pod \"redhat-marketplace-fhcjb\" (UID: \"781ea677-36c7-4740-afdf-fb081484cf0c\") " pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.791457 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/781ea677-36c7-4740-afdf-fb081484cf0c-utilities\") pod \"redhat-marketplace-fhcjb\" (UID: \"781ea677-36c7-4740-afdf-fb081484cf0c\") " pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.893401 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnctw\" (UniqueName: \"kubernetes.io/projected/781ea677-36c7-4740-afdf-fb081484cf0c-kube-api-access-gnctw\") pod \"redhat-marketplace-fhcjb\" (UID: \"781ea677-36c7-4740-afdf-fb081484cf0c\") " pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.893485 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/781ea677-36c7-4740-afdf-fb081484cf0c-catalog-content\") pod \"redhat-marketplace-fhcjb\" 
(UID: \"781ea677-36c7-4740-afdf-fb081484cf0c\") " pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.893507 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/781ea677-36c7-4740-afdf-fb081484cf0c-utilities\") pod \"redhat-marketplace-fhcjb\" (UID: \"781ea677-36c7-4740-afdf-fb081484cf0c\") " pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.894010 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/781ea677-36c7-4740-afdf-fb081484cf0c-catalog-content\") pod \"redhat-marketplace-fhcjb\" (UID: \"781ea677-36c7-4740-afdf-fb081484cf0c\") " pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.894101 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/781ea677-36c7-4740-afdf-fb081484cf0c-utilities\") pod \"redhat-marketplace-fhcjb\" (UID: \"781ea677-36c7-4740-afdf-fb081484cf0c\") " pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:45 crc kubenswrapper[4592]: I0929 17:46:45.914115 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnctw\" (UniqueName: \"kubernetes.io/projected/781ea677-36c7-4740-afdf-fb081484cf0c-kube-api-access-gnctw\") pod \"redhat-marketplace-fhcjb\" (UID: \"781ea677-36c7-4740-afdf-fb081484cf0c\") " pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:46 crc kubenswrapper[4592]: I0929 17:46:46.025003 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:46 crc kubenswrapper[4592]: I0929 17:46:46.638536 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhcjb"] Sep 29 17:46:46 crc kubenswrapper[4592]: I0929 17:46:46.979220 4592 generic.go:334] "Generic (PLEG): container finished" podID="781ea677-36c7-4740-afdf-fb081484cf0c" containerID="2dab16d63018852c064e9a70e9f3e3caa6fec50fb6b6c5cdc5ded8e14f0662d8" exitCode=0 Sep 29 17:46:46 crc kubenswrapper[4592]: I0929 17:46:46.979282 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhcjb" event={"ID":"781ea677-36c7-4740-afdf-fb081484cf0c","Type":"ContainerDied","Data":"2dab16d63018852c064e9a70e9f3e3caa6fec50fb6b6c5cdc5ded8e14f0662d8"} Sep 29 17:46:46 crc kubenswrapper[4592]: I0929 17:46:46.979538 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhcjb" event={"ID":"781ea677-36c7-4740-afdf-fb081484cf0c","Type":"ContainerStarted","Data":"08c391b5570abb3ae3929c0550789716b35f6fb02759319cf1b1d7fc70ad7314"} Sep 29 17:46:47 crc kubenswrapper[4592]: I0929 17:46:47.990115 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhcjb" event={"ID":"781ea677-36c7-4740-afdf-fb081484cf0c","Type":"ContainerStarted","Data":"e10e569fa0e2aa3e73f26cd788f467bb697c60426867f92697550573b274d7e7"} Sep 29 17:46:48 crc kubenswrapper[4592]: I0929 17:46:48.999288 4592 generic.go:334] "Generic (PLEG): container finished" podID="781ea677-36c7-4740-afdf-fb081484cf0c" containerID="e10e569fa0e2aa3e73f26cd788f467bb697c60426867f92697550573b274d7e7" exitCode=0 Sep 29 17:46:48 crc kubenswrapper[4592]: I0929 17:46:48.999330 
4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhcjb" event={"ID":"781ea677-36c7-4740-afdf-fb081484cf0c","Type":"ContainerDied","Data":"e10e569fa0e2aa3e73f26cd788f467bb697c60426867f92697550573b274d7e7"} Sep 29 17:46:49 crc kubenswrapper[4592]: I0929 17:46:49.184225 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:46:49 crc kubenswrapper[4592]: E0929 17:46:49.185207 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:46:50 crc kubenswrapper[4592]: I0929 17:46:50.008880 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhcjb" event={"ID":"781ea677-36c7-4740-afdf-fb081484cf0c","Type":"ContainerStarted","Data":"1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985"} Sep 29 17:46:50 crc kubenswrapper[4592]: I0929 17:46:50.030352 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fhcjb" podStartSLOduration=2.619659906 podStartE2EDuration="5.03033563s" podCreationTimestamp="2025-09-29 17:46:45 +0000 UTC" firstStartedPulling="2025-09-29 17:46:46.980656992 +0000 UTC m=+3337.128434673" lastFinishedPulling="2025-09-29 17:46:49.391332716 +0000 UTC m=+3339.539110397" observedRunningTime="2025-09-29 17:46:50.02957631 +0000 UTC m=+3340.177353991" watchObservedRunningTime="2025-09-29 17:46:50.03033563 +0000 UTC m=+3340.178113311" Sep 29 17:46:56 crc kubenswrapper[4592]: I0929 17:46:56.026024 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:56 crc kubenswrapper[4592]: I0929 17:46:56.027624 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:56 crc kubenswrapper[4592]: I0929 17:46:56.125682 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:57 crc kubenswrapper[4592]: I0929 17:46:57.135873 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:57 crc kubenswrapper[4592]: I0929 17:46:57.216749 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhcjb"] Sep 29 17:46:59 crc kubenswrapper[4592]: I0929 17:46:59.104721 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fhcjb" podUID="781ea677-36c7-4740-afdf-fb081484cf0c" containerName="registry-server" containerID="cri-o://1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985" gracePeriod=2 Sep 29 17:46:59 crc kubenswrapper[4592]: I0929 17:46:59.774353 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:46:59 crc kubenswrapper[4592]: I0929 17:46:59.958836 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnctw\" (UniqueName: \"kubernetes.io/projected/781ea677-36c7-4740-afdf-fb081484cf0c-kube-api-access-gnctw\") pod \"781ea677-36c7-4740-afdf-fb081484cf0c\" (UID: \"781ea677-36c7-4740-afdf-fb081484cf0c\") " Sep 29 17:46:59 crc kubenswrapper[4592]: I0929 17:46:59.958960 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/781ea677-36c7-4740-afdf-fb081484cf0c-catalog-content\") pod \"781ea677-36c7-4740-afdf-fb081484cf0c\" (UID: \"781ea677-36c7-4740-afdf-fb081484cf0c\") " Sep 29 17:46:59 crc kubenswrapper[4592]: I0929 17:46:59.959012 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/781ea677-36c7-4740-afdf-fb081484cf0c-utilities\") pod \"781ea677-36c7-4740-afdf-fb081484cf0c\" (UID: \"781ea677-36c7-4740-afdf-fb081484cf0c\") " Sep 29 17:46:59 crc kubenswrapper[4592]: I0929 17:46:59.960023 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/781ea677-36c7-4740-afdf-fb081484cf0c-utilities" (OuterVolumeSpecName: "utilities") pod "781ea677-36c7-4740-afdf-fb081484cf0c" (UID: "781ea677-36c7-4740-afdf-fb081484cf0c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:46:59 crc kubenswrapper[4592]: I0929 17:46:59.970209 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/781ea677-36c7-4740-afdf-fb081484cf0c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "781ea677-36c7-4740-afdf-fb081484cf0c" (UID: "781ea677-36c7-4740-afdf-fb081484cf0c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:46:59 crc kubenswrapper[4592]: I0929 17:46:59.976394 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/781ea677-36c7-4740-afdf-fb081484cf0c-kube-api-access-gnctw" (OuterVolumeSpecName: "kube-api-access-gnctw") pod "781ea677-36c7-4740-afdf-fb081484cf0c" (UID: "781ea677-36c7-4740-afdf-fb081484cf0c"). InnerVolumeSpecName "kube-api-access-gnctw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.062370 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnctw\" (UniqueName: \"kubernetes.io/projected/781ea677-36c7-4740-afdf-fb081484cf0c-kube-api-access-gnctw\") on node \"crc\" DevicePath \"\"" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.062423 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/781ea677-36c7-4740-afdf-fb081484cf0c-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.062434 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/781ea677-36c7-4740-afdf-fb081484cf0c-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.115303 4592 generic.go:334] "Generic (PLEG): container finished" podID="781ea677-36c7-4740-afdf-fb081484cf0c" containerID="1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985" exitCode=0 Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.115364 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhcjb" event={"ID":"781ea677-36c7-4740-afdf-fb081484cf0c","Type":"ContainerDied","Data":"1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985"} Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.115411 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fhcjb" event={"ID":"781ea677-36c7-4740-afdf-fb081484cf0c","Type":"ContainerDied","Data":"08c391b5570abb3ae3929c0550789716b35f6fb02759319cf1b1d7fc70ad7314"} Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.115439 4592 scope.go:117] "RemoveContainer" containerID="1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.115547 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fhcjb" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.156187 4592 scope.go:117] "RemoveContainer" containerID="e10e569fa0e2aa3e73f26cd788f467bb697c60426867f92697550573b274d7e7" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.161989 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhcjb"] Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.175829 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fhcjb"] Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.188370 4592 scope.go:117] "RemoveContainer" containerID="2dab16d63018852c064e9a70e9f3e3caa6fec50fb6b6c5cdc5ded8e14f0662d8" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.230771 4592 scope.go:117] "RemoveContainer" containerID="1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985" Sep 29 17:47:00 crc kubenswrapper[4592]: E0929 17:47:00.231166 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985\": container with ID starting with 1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985 not found: ID does not exist" containerID="1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.231191 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985"} err="failed to get container status \"1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985\": rpc error: code = NotFound desc = could not find container \"1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985\": container with ID starting with 1d103430d7b62edad2ca9088b8ed02c20f91f09bbf2637c4b9e4ae9e76274985 not found: ID does not exist" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.231211 4592 scope.go:117] "RemoveContainer" containerID="e10e569fa0e2aa3e73f26cd788f467bb697c60426867f92697550573b274d7e7" Sep 29 17:47:00 crc kubenswrapper[4592]: E0929 17:47:00.231546 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e10e569fa0e2aa3e73f26cd788f467bb697c60426867f92697550573b274d7e7\": container with ID starting with e10e569fa0e2aa3e73f26cd788f467bb697c60426867f92697550573b274d7e7 not found: ID does not exist" containerID="e10e569fa0e2aa3e73f26cd788f467bb697c60426867f92697550573b274d7e7" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.231568 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e10e569fa0e2aa3e73f26cd788f467bb697c60426867f92697550573b274d7e7"} err="failed to get container status \"e10e569fa0e2aa3e73f26cd788f467bb697c60426867f92697550573b274d7e7\": rpc error: code = NotFound desc = could not find container \"e10e569fa0e2aa3e73f26cd788f467bb697c60426867f92697550573b274d7e7\": container with ID starting with e10e569fa0e2aa3e73f26cd788f467bb697c60426867f92697550573b274d7e7 not found: ID does not exist" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.231580 4592 scope.go:117] "RemoveContainer" containerID="2dab16d63018852c064e9a70e9f3e3caa6fec50fb6b6c5cdc5ded8e14f0662d8" Sep 29 17:47:00 crc kubenswrapper[4592]: E0929 17:47:00.231740 4592 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2dab16d63018852c064e9a70e9f3e3caa6fec50fb6b6c5cdc5ded8e14f0662d8\": container with ID starting with 2dab16d63018852c064e9a70e9f3e3caa6fec50fb6b6c5cdc5ded8e14f0662d8 not found: ID does not exist" containerID="2dab16d63018852c064e9a70e9f3e3caa6fec50fb6b6c5cdc5ded8e14f0662d8" Sep 29 17:47:00 crc kubenswrapper[4592]: I0929 17:47:00.231756 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2dab16d63018852c064e9a70e9f3e3caa6fec50fb6b6c5cdc5ded8e14f0662d8"} err="failed to get container status \"2dab16d63018852c064e9a70e9f3e3caa6fec50fb6b6c5cdc5ded8e14f0662d8\": rpc error: code = NotFound desc = could not find container \"2dab16d63018852c064e9a70e9f3e3caa6fec50fb6b6c5cdc5ded8e14f0662d8\": container with ID starting with 2dab16d63018852c064e9a70e9f3e3caa6fec50fb6b6c5cdc5ded8e14f0662d8 not found: ID does not exist" Sep 29 17:47:01 crc kubenswrapper[4592]: I0929 17:47:01.196927 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="781ea677-36c7-4740-afdf-fb081484cf0c" path="/var/lib/kubelet/pods/781ea677-36c7-4740-afdf-fb081484cf0c/volumes" Sep 29 17:47:04 crc kubenswrapper[4592]: I0929 17:47:04.183485 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:47:04 crc kubenswrapper[4592]: E0929 17:47:04.184182 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.224780 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-msd87"] Sep 29 17:47:09 crc kubenswrapper[4592]: E0929 17:47:09.225706 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="781ea677-36c7-4740-afdf-fb081484cf0c" containerName="extract-utilities" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.225721 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="781ea677-36c7-4740-afdf-fb081484cf0c" containerName="extract-utilities" Sep 29 17:47:09 crc kubenswrapper[4592]: E0929 17:47:09.225745 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="781ea677-36c7-4740-afdf-fb081484cf0c" containerName="extract-content" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.225752 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="781ea677-36c7-4740-afdf-fb081484cf0c" containerName="extract-content" Sep 29 17:47:09 crc kubenswrapper[4592]: E0929 17:47:09.225786 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="781ea677-36c7-4740-afdf-fb081484cf0c" containerName="registry-server" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.225797 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="781ea677-36c7-4740-afdf-fb081484cf0c" containerName="registry-server" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.226027 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="781ea677-36c7-4740-afdf-fb081484cf0c" containerName="registry-server" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.227934 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.246627 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-msd87"] Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.342510 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-utilities\") pod \"community-operators-msd87\" (UID: \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\") " pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.342552 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-catalog-content\") pod \"community-operators-msd87\" (UID: \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\") " pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.342628 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8sfb\" (UniqueName: \"kubernetes.io/projected/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-kube-api-access-d8sfb\") pod \"community-operators-msd87\" (UID: \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\") " pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.444102 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-utilities\") pod \"community-operators-msd87\" (UID: \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\") " pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.444165 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-catalog-content\") pod \"community-operators-msd87\" (UID: \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\") " pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.444217 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8sfb\" (UniqueName: \"kubernetes.io/projected/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-kube-api-access-d8sfb\") pod \"community-operators-msd87\" (UID: \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\") " pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.444594 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-utilities\") pod \"community-operators-msd87\" (UID: \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\") " pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.444720 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-catalog-content\") pod \"community-operators-msd87\" (UID: \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\") " pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.461553 4592 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-d8sfb\" (UniqueName: \"kubernetes.io/projected/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-kube-api-access-d8sfb\") pod \"community-operators-msd87\" (UID: \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\") " pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:09 crc kubenswrapper[4592]: I0929 17:47:09.558452 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:10 crc kubenswrapper[4592]: I0929 17:47:10.176686 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-msd87"] Sep 29 17:47:10 crc kubenswrapper[4592]: I0929 17:47:10.208301 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-msd87" event={"ID":"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d","Type":"ContainerStarted","Data":"776c98685c38d3bc5d1afcd4a801de8bd298968a1d975fefe9042720b8161493"} Sep 29 17:47:11 crc kubenswrapper[4592]: I0929 17:47:11.216588 4592 generic.go:334] "Generic (PLEG): container finished" podID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" containerID="6f55c89ada695538553e88f2bf945ea39a7ac734b55676b29990b1f47d2c08c7" exitCode=0 Sep 29 17:47:11 crc kubenswrapper[4592]: I0929 17:47:11.233830 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-msd87" event={"ID":"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d","Type":"ContainerDied","Data":"6f55c89ada695538553e88f2bf945ea39a7ac734b55676b29990b1f47d2c08c7"} Sep 29 17:47:13 crc kubenswrapper[4592]: I0929 17:47:13.236616 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-msd87" event={"ID":"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d","Type":"ContainerStarted","Data":"4e61c1236ea623c57d71cdb09c5f6ba118a2248bab8d8157f5a30bee879c8412"} Sep 29 17:47:14 crc kubenswrapper[4592]: I0929 17:47:14.247621 4592 generic.go:334] "Generic (PLEG): container finished" podID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" containerID="4e61c1236ea623c57d71cdb09c5f6ba118a2248bab8d8157f5a30bee879c8412" exitCode=0 Sep 29 17:47:14 crc kubenswrapper[4592]: I0929 17:47:14.247667 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-msd87" event={"ID":"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d","Type":"ContainerDied","Data":"4e61c1236ea623c57d71cdb09c5f6ba118a2248bab8d8157f5a30bee879c8412"} Sep 29 17:47:15 crc kubenswrapper[4592]: I0929 17:47:15.266061 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-msd87" event={"ID":"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d","Type":"ContainerStarted","Data":"9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1"} Sep 29 17:47:15 crc kubenswrapper[4592]: I0929 17:47:15.294535 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-msd87" podStartSLOduration=2.892871817 podStartE2EDuration="6.294511086s" podCreationTimestamp="2025-09-29 17:47:09 +0000 UTC" firstStartedPulling="2025-09-29 17:47:11.241736422 +0000 UTC m=+3361.389514143" lastFinishedPulling="2025-09-29 17:47:14.643375731 +0000 UTC m=+3364.791153412" observedRunningTime="2025-09-29 17:47:15.28987421 +0000 UTC m=+3365.437651891" watchObservedRunningTime="2025-09-29 17:47:15.294511086 +0000 UTC m=+3365.442288767" Sep 29 17:47:18 crc kubenswrapper[4592]: I0929 17:47:18.183843 4592 scope.go:117] "RemoveContainer" 
containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:47:18 crc kubenswrapper[4592]: E0929 17:47:18.184432 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:47:19 crc kubenswrapper[4592]: I0929 17:47:19.559296 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:19 crc kubenswrapper[4592]: I0929 17:47:19.559621 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:20 crc kubenswrapper[4592]: I0929 17:47:20.606271 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-msd87" podUID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" containerName="registry-server" probeResult="failure" output=< Sep 29 17:47:20 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s Sep 29 17:47:20 crc kubenswrapper[4592]: > Sep 29 17:47:29 crc kubenswrapper[4592]: I0929 17:47:29.183315 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:47:29 crc kubenswrapper[4592]: E0929 17:47:29.184056 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:47:29 crc kubenswrapper[4592]: I0929 17:47:29.607210 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:29 crc kubenswrapper[4592]: I0929 17:47:29.663329 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:29 crc kubenswrapper[4592]: I0929 17:47:29.852294 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-msd87"] Sep 29 17:47:31 crc kubenswrapper[4592]: I0929 17:47:31.396672 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-msd87" podUID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" containerName="registry-server" containerID="cri-o://9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1" gracePeriod=2 Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.041204 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.201625 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-utilities\") pod \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\" (UID: \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\") " Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.201853 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-catalog-content\") pod \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\" (UID: \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\") " Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.201939 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8sfb\" (UniqueName: \"kubernetes.io/projected/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-kube-api-access-d8sfb\") pod \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\" (UID: \"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d\") " Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.203191 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-utilities" (OuterVolumeSpecName: "utilities") pod "1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" (UID: "1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.203351 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.210388 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-kube-api-access-d8sfb" (OuterVolumeSpecName: "kube-api-access-d8sfb") pod "1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" (UID: "1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d"). InnerVolumeSpecName "kube-api-access-d8sfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.266096 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" (UID: "1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.307621 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.307654 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8sfb\" (UniqueName: \"kubernetes.io/projected/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d-kube-api-access-d8sfb\") on node \"crc\" DevicePath \"\"" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.410855 4592 generic.go:334] "Generic (PLEG): container finished" podID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" containerID="9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1" exitCode=0 Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.410896 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-msd87" event={"ID":"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d","Type":"ContainerDied","Data":"9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1"} Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.410925 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-msd87" event={"ID":"1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d","Type":"ContainerDied","Data":"776c98685c38d3bc5d1afcd4a801de8bd298968a1d975fefe9042720b8161493"} Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.410948 4592 scope.go:117] "RemoveContainer" containerID="9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.411076 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-msd87" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.455497 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-msd87"] Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.455999 4592 scope.go:117] "RemoveContainer" containerID="4e61c1236ea623c57d71cdb09c5f6ba118a2248bab8d8157f5a30bee879c8412" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.468575 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-msd87"] Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.492236 4592 scope.go:117] "RemoveContainer" containerID="6f55c89ada695538553e88f2bf945ea39a7ac734b55676b29990b1f47d2c08c7" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.540025 4592 scope.go:117] "RemoveContainer" containerID="9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1" Sep 29 17:47:32 crc kubenswrapper[4592]: E0929 17:47:32.540661 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1\": container with ID starting with 9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1 not found: ID does not exist" containerID="9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.540691 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1"} err="failed to get container status \"9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1\": rpc error: code = NotFound desc = could not find container \"9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1\": container with ID starting with 9795af3289d8f93fb938f131a152f187a8c5d6838865cb7b417cf04793b916d1 not found: ID does not exist" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.540712 4592 scope.go:117] "RemoveContainer" containerID="4e61c1236ea623c57d71cdb09c5f6ba118a2248bab8d8157f5a30bee879c8412" Sep 29 17:47:32 crc kubenswrapper[4592]: E0929 17:47:32.541078 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e61c1236ea623c57d71cdb09c5f6ba118a2248bab8d8157f5a30bee879c8412\": container with ID starting with 4e61c1236ea623c57d71cdb09c5f6ba118a2248bab8d8157f5a30bee879c8412 not found: ID does not exist" containerID="4e61c1236ea623c57d71cdb09c5f6ba118a2248bab8d8157f5a30bee879c8412" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.541119 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e61c1236ea623c57d71cdb09c5f6ba118a2248bab8d8157f5a30bee879c8412"} err="failed to get container status \"4e61c1236ea623c57d71cdb09c5f6ba118a2248bab8d8157f5a30bee879c8412\": rpc error: code = NotFound desc = could not find container \"4e61c1236ea623c57d71cdb09c5f6ba118a2248bab8d8157f5a30bee879c8412\": container with ID starting with 4e61c1236ea623c57d71cdb09c5f6ba118a2248bab8d8157f5a30bee879c8412 not found: ID does not exist" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.541147 4592 scope.go:117] "RemoveContainer" containerID="6f55c89ada695538553e88f2bf945ea39a7ac734b55676b29990b1f47d2c08c7" Sep 29 17:47:32 crc kubenswrapper[4592]: E0929 17:47:32.541600 4592 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6f55c89ada695538553e88f2bf945ea39a7ac734b55676b29990b1f47d2c08c7\": container with ID starting with 6f55c89ada695538553e88f2bf945ea39a7ac734b55676b29990b1f47d2c08c7 not found: ID does not exist" containerID="6f55c89ada695538553e88f2bf945ea39a7ac734b55676b29990b1f47d2c08c7" Sep 29 17:47:32 crc kubenswrapper[4592]: I0929 17:47:32.541626 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f55c89ada695538553e88f2bf945ea39a7ac734b55676b29990b1f47d2c08c7"} err="failed to get container status \"6f55c89ada695538553e88f2bf945ea39a7ac734b55676b29990b1f47d2c08c7\": rpc error: code = NotFound desc = could not find container \"6f55c89ada695538553e88f2bf945ea39a7ac734b55676b29990b1f47d2c08c7\": container with ID starting with 6f55c89ada695538553e88f2bf945ea39a7ac734b55676b29990b1f47d2c08c7 not found: ID does not exist" Sep 29 17:47:33 crc kubenswrapper[4592]: I0929 17:47:33.192657 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" path="/var/lib/kubelet/pods/1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d/volumes" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.262945 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lhdjw"] Sep 29 17:47:34 crc kubenswrapper[4592]: E0929 17:47:34.263327 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" containerName="extract-utilities" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.263338 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" containerName="extract-utilities" Sep 29 17:47:34 crc kubenswrapper[4592]: E0929 17:47:34.263366 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" containerName="extract-content" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.263372 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" containerName="extract-content" Sep 29 17:47:34 crc kubenswrapper[4592]: E0929 17:47:34.263382 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" containerName="registry-server" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.263388 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" containerName="registry-server" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.263558 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b0dbe9a-4bfd-4d4c-8221-ce131a27d83d" containerName="registry-server" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.264803 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.277563 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lhdjw"] Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.450214 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-catalog-content\") pod \"redhat-operators-lhdjw\" (UID: \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\") " pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.450374 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-utilities\") pod \"redhat-operators-lhdjw\" (UID: \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\") " pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.450601 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trrbw\" (UniqueName: \"kubernetes.io/projected/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-kube-api-access-trrbw\") pod \"redhat-operators-lhdjw\" (UID: \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\") " pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.553067 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trrbw\" (UniqueName: \"kubernetes.io/projected/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-kube-api-access-trrbw\") pod \"redhat-operators-lhdjw\" (UID: \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\") " pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.553464 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-catalog-content\") pod \"redhat-operators-lhdjw\" (UID: \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\") " pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.553627 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-utilities\") pod \"redhat-operators-lhdjw\" (UID: \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\") " pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.553982 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-catalog-content\") pod \"redhat-operators-lhdjw\" (UID: \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\") " pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.554050 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-utilities\") pod \"redhat-operators-lhdjw\" (UID: \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\") " pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.573743 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-trrbw\" (UniqueName: \"kubernetes.io/projected/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-kube-api-access-trrbw\") pod \"redhat-operators-lhdjw\" (UID: \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\") " pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:34 crc kubenswrapper[4592]: I0929 17:47:34.596740 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:35 crc kubenswrapper[4592]: I0929 17:47:35.120641 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lhdjw"] Sep 29 17:47:35 crc kubenswrapper[4592]: I0929 17:47:35.439887 4592 generic.go:334] "Generic (PLEG): container finished" podID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" containerID="3b28f20324cfc3ebb599cb419fb2b72596bc4aadbe5b37f5f52fd04250592811" exitCode=0 Sep 29 17:47:35 crc kubenswrapper[4592]: I0929 17:47:35.440002 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdjw" event={"ID":"4d74ae6e-7bf5-4672-94af-53ea0029bb8e","Type":"ContainerDied","Data":"3b28f20324cfc3ebb599cb419fb2b72596bc4aadbe5b37f5f52fd04250592811"} Sep 29 17:47:35 crc kubenswrapper[4592]: I0929 17:47:35.440258 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdjw" event={"ID":"4d74ae6e-7bf5-4672-94af-53ea0029bb8e","Type":"ContainerStarted","Data":"32548801e55348ad77362693e2d0e2f4d8d16e30ae8c857219b08c5a9b9471d7"} Sep 29 17:47:36 crc kubenswrapper[4592]: I0929 17:47:36.450292 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdjw" event={"ID":"4d74ae6e-7bf5-4672-94af-53ea0029bb8e","Type":"ContainerStarted","Data":"79f1a5d351de464f30f4c7c733c18e1c24e1336037bea079b80a0a41d675381f"} Sep 29 17:47:40 crc kubenswrapper[4592]: I0929 17:47:40.499211 4592 generic.go:334] "Generic (PLEG): container finished" podID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" containerID="79f1a5d351de464f30f4c7c733c18e1c24e1336037bea079b80a0a41d675381f" exitCode=0 Sep 29 17:47:40 crc kubenswrapper[4592]: I0929 17:47:40.499283 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdjw" event={"ID":"4d74ae6e-7bf5-4672-94af-53ea0029bb8e","Type":"ContainerDied","Data":"79f1a5d351de464f30f4c7c733c18e1c24e1336037bea079b80a0a41d675381f"} Sep 29 17:47:41 crc kubenswrapper[4592]: I0929 17:47:41.511641 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdjw" event={"ID":"4d74ae6e-7bf5-4672-94af-53ea0029bb8e","Type":"ContainerStarted","Data":"3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331"} Sep 29 17:47:41 crc kubenswrapper[4592]: I0929 17:47:41.538098 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lhdjw" podStartSLOduration=1.9721085010000001 podStartE2EDuration="7.538081392s" podCreationTimestamp="2025-09-29 17:47:34 +0000 UTC" firstStartedPulling="2025-09-29 17:47:35.441622875 +0000 UTC m=+3385.589400556" lastFinishedPulling="2025-09-29 17:47:41.007595756 +0000 UTC m=+3391.155373447" observedRunningTime="2025-09-29 17:47:41.531675448 +0000 UTC m=+3391.679453139" watchObservedRunningTime="2025-09-29 17:47:41.538081392 +0000 UTC m=+3391.685859073" Sep 29 17:47:43 crc kubenswrapper[4592]: I0929 17:47:43.183795 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 
17:47:43 crc kubenswrapper[4592]: E0929 17:47:43.184273 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:47:44 crc kubenswrapper[4592]: I0929 17:47:44.597786 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:44 crc kubenswrapper[4592]: I0929 17:47:44.597834 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:45 crc kubenswrapper[4592]: I0929 17:47:45.647643 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lhdjw" podUID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" containerName="registry-server" probeResult="failure" output=< Sep 29 17:47:45 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s Sep 29 17:47:45 crc kubenswrapper[4592]: > Sep 29 17:47:54 crc kubenswrapper[4592]: I0929 17:47:54.649553 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:54 crc kubenswrapper[4592]: I0929 17:47:54.711265 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:54 crc kubenswrapper[4592]: I0929 17:47:54.887486 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lhdjw"] Sep 29 17:47:56 crc kubenswrapper[4592]: I0929 17:47:56.656699 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lhdjw" podUID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" containerName="registry-server" containerID="cri-o://3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331" gracePeriod=2 Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.188962 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:47:57 crc kubenswrapper[4592]: E0929 17:47:57.189506 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.197436 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.251572 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-utilities\") pod \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\" (UID: \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\") " Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.252341 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-utilities" (OuterVolumeSpecName: "utilities") pod "4d74ae6e-7bf5-4672-94af-53ea0029bb8e" (UID: "4d74ae6e-7bf5-4672-94af-53ea0029bb8e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.353170 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-catalog-content\") pod \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\" (UID: \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\") " Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.353586 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trrbw\" (UniqueName: \"kubernetes.io/projected/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-kube-api-access-trrbw\") pod \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\" (UID: \"4d74ae6e-7bf5-4672-94af-53ea0029bb8e\") " Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.354224 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.366161 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-kube-api-access-trrbw" (OuterVolumeSpecName: "kube-api-access-trrbw") pod "4d74ae6e-7bf5-4672-94af-53ea0029bb8e" (UID: "4d74ae6e-7bf5-4672-94af-53ea0029bb8e"). InnerVolumeSpecName "kube-api-access-trrbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.463967 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trrbw\" (UniqueName: \"kubernetes.io/projected/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-kube-api-access-trrbw\") on node \"crc\" DevicePath \"\"" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.473557 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d74ae6e-7bf5-4672-94af-53ea0029bb8e" (UID: "4d74ae6e-7bf5-4672-94af-53ea0029bb8e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.566080 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d74ae6e-7bf5-4672-94af-53ea0029bb8e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.725708 4592 generic.go:334] "Generic (PLEG): container finished" podID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" containerID="3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331" exitCode=0 Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.725749 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdjw" event={"ID":"4d74ae6e-7bf5-4672-94af-53ea0029bb8e","Type":"ContainerDied","Data":"3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331"} Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.725778 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lhdjw" event={"ID":"4d74ae6e-7bf5-4672-94af-53ea0029bb8e","Type":"ContainerDied","Data":"32548801e55348ad77362693e2d0e2f4d8d16e30ae8c857219b08c5a9b9471d7"} Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.725799 4592 scope.go:117] "RemoveContainer" containerID="3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.725966 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lhdjw" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.757439 4592 scope.go:117] "RemoveContainer" containerID="79f1a5d351de464f30f4c7c733c18e1c24e1336037bea079b80a0a41d675381f" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.811244 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lhdjw"] Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.834302 4592 scope.go:117] "RemoveContainer" containerID="3b28f20324cfc3ebb599cb419fb2b72596bc4aadbe5b37f5f52fd04250592811" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.844647 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lhdjw"] Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.863104 4592 scope.go:117] "RemoveContainer" containerID="3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331" Sep 29 17:47:57 crc kubenswrapper[4592]: E0929 17:47:57.863771 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331\": container with ID starting with 3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331 not found: ID does not exist" containerID="3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.863818 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331"} err="failed to get container status \"3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331\": rpc error: code = NotFound desc = could not find container \"3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331\": container with ID starting with 3f38b1bb11a9037e155077493d5b02038a27ddbac9b19a64f2665c5448c9b331 not found: ID does not exist" Sep 29 17:47:57 crc 
kubenswrapper[4592]: I0929 17:47:57.863848 4592 scope.go:117] "RemoveContainer" containerID="79f1a5d351de464f30f4c7c733c18e1c24e1336037bea079b80a0a41d675381f" Sep 29 17:47:57 crc kubenswrapper[4592]: E0929 17:47:57.864175 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79f1a5d351de464f30f4c7c733c18e1c24e1336037bea079b80a0a41d675381f\": container with ID starting with 79f1a5d351de464f30f4c7c733c18e1c24e1336037bea079b80a0a41d675381f not found: ID does not exist" containerID="79f1a5d351de464f30f4c7c733c18e1c24e1336037bea079b80a0a41d675381f" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.864215 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79f1a5d351de464f30f4c7c733c18e1c24e1336037bea079b80a0a41d675381f"} err="failed to get container status \"79f1a5d351de464f30f4c7c733c18e1c24e1336037bea079b80a0a41d675381f\": rpc error: code = NotFound desc = could not find container \"79f1a5d351de464f30f4c7c733c18e1c24e1336037bea079b80a0a41d675381f\": container with ID starting with 79f1a5d351de464f30f4c7c733c18e1c24e1336037bea079b80a0a41d675381f not found: ID does not exist" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.864243 4592 scope.go:117] "RemoveContainer" containerID="3b28f20324cfc3ebb599cb419fb2b72596bc4aadbe5b37f5f52fd04250592811" Sep 29 17:47:57 crc kubenswrapper[4592]: E0929 17:47:57.864498 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b28f20324cfc3ebb599cb419fb2b72596bc4aadbe5b37f5f52fd04250592811\": container with ID starting with 3b28f20324cfc3ebb599cb419fb2b72596bc4aadbe5b37f5f52fd04250592811 not found: ID does not exist" containerID="3b28f20324cfc3ebb599cb419fb2b72596bc4aadbe5b37f5f52fd04250592811" Sep 29 17:47:57 crc kubenswrapper[4592]: I0929 17:47:57.864532 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b28f20324cfc3ebb599cb419fb2b72596bc4aadbe5b37f5f52fd04250592811"} err="failed to get container status \"3b28f20324cfc3ebb599cb419fb2b72596bc4aadbe5b37f5f52fd04250592811\": rpc error: code = NotFound desc = could not find container \"3b28f20324cfc3ebb599cb419fb2b72596bc4aadbe5b37f5f52fd04250592811\": container with ID starting with 3b28f20324cfc3ebb599cb419fb2b72596bc4aadbe5b37f5f52fd04250592811 not found: ID does not exist" Sep 29 17:47:59 crc kubenswrapper[4592]: I0929 17:47:59.196137 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" path="/var/lib/kubelet/pods/4d74ae6e-7bf5-4672-94af-53ea0029bb8e/volumes" Sep 29 17:48:09 crc kubenswrapper[4592]: I0929 17:48:09.183636 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" Sep 29 17:48:09 crc kubenswrapper[4592]: E0929 17:48:09.184638 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:48:21 crc kubenswrapper[4592]: I0929 17:48:21.188804 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8" 
Sep 29 17:48:21 crc kubenswrapper[4592]: E0929 17:48:21.189604 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:48:36 crc kubenswrapper[4592]: I0929 17:48:36.183307 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8"
Sep 29 17:48:37 crc kubenswrapper[4592]: I0929 17:48:37.079687 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"74bf0afc968f6204f6bcfeced21fb989e8b331d81ea9a29f9be78deba78f328b"}
Sep 29 17:51:00 crc kubenswrapper[4592]: I0929 17:51:00.882765 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 17:51:00 crc kubenswrapper[4592]: I0929 17:51:00.884226 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 17:51:30 crc kubenswrapper[4592]: I0929 17:51:30.882910 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 17:51:30 crc kubenswrapper[4592]: I0929 17:51:30.883624 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 17:52:00 crc kubenswrapper[4592]: I0929 17:52:00.883223 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 17:52:00 crc kubenswrapper[4592]: I0929 17:52:00.883856 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 17:52:00 crc kubenswrapper[4592]: I0929 17:52:00.883919 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 17:52:00 crc kubenswrapper[4592]: I0929 17:52:00.884932 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"74bf0afc968f6204f6bcfeced21fb989e8b331d81ea9a29f9be78deba78f328b"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 17:52:00 crc kubenswrapper[4592]: I0929 17:52:00.885031 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://74bf0afc968f6204f6bcfeced21fb989e8b331d81ea9a29f9be78deba78f328b" gracePeriod=600
Sep 29 17:52:01 crc kubenswrapper[4592]: I0929 17:52:01.932196 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="74bf0afc968f6204f6bcfeced21fb989e8b331d81ea9a29f9be78deba78f328b" exitCode=0
Sep 29 17:52:01 crc kubenswrapper[4592]: I0929 17:52:01.932215 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"74bf0afc968f6204f6bcfeced21fb989e8b331d81ea9a29f9be78deba78f328b"}
Sep 29 17:52:01 crc kubenswrapper[4592]: I0929 17:52:01.934353 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"}
Sep 29 17:52:01 crc kubenswrapper[4592]: I0929 17:52:01.934428 4592 scope.go:117] "RemoveContainer" containerID="415ad75e1b64a05f5c0d36b53d49daf4ec8d51fa492eeac1c4c633fa74e4f2a8"
Sep 29 17:54:30 crc kubenswrapper[4592]: I0929 17:54:30.882919 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 17:54:30 crc kubenswrapper[4592]: I0929 17:54:30.883485 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 17:55:00 crc kubenswrapper[4592]: I0929 17:55:00.883120 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 17:55:00 crc kubenswrapper[4592]: I0929 17:55:00.884649 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 17:55:30 crc kubenswrapper[4592]: I0929 17:55:30.882889 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 17:55:30 crc kubenswrapper[4592]: I0929 17:55:30.883471 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 17:55:30 crc kubenswrapper[4592]: I0929 17:55:30.883532 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 17:55:30 crc kubenswrapper[4592]: I0929 17:55:30.884457 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 17:55:30 crc kubenswrapper[4592]: I0929 17:55:30.884517 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc" gracePeriod=600
Sep 29 17:55:31 crc kubenswrapper[4592]: E0929 17:55:31.014319 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:55:31 crc kubenswrapper[4592]: I0929 17:55:31.745075 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc" exitCode=0
Sep 29 17:55:31 crc kubenswrapper[4592]: I0929 17:55:31.745191 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"}
Sep 29 17:55:31 crc kubenswrapper[4592]: I0929 17:55:31.745452 4592 scope.go:117] "RemoveContainer" containerID="74bf0afc968f6204f6bcfeced21fb989e8b331d81ea9a29f9be78deba78f328b"
Sep 29 17:55:31 crc kubenswrapper[4592]: I0929 17:55:31.746661 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:55:31 crc kubenswrapper[4592]: E0929 17:55:31.747553 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:55:46 crc kubenswrapper[4592]: I0929 17:55:46.183332 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:55:46 crc kubenswrapper[4592]: E0929 17:55:46.184049 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:56:00 crc kubenswrapper[4592]: I0929 17:56:00.183572 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:56:00 crc kubenswrapper[4592]: E0929 17:56:00.184401 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:56:12 crc kubenswrapper[4592]: I0929 17:56:12.182867 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:56:12 crc kubenswrapper[4592]: E0929 17:56:12.183592 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:56:23 crc kubenswrapper[4592]: I0929 17:56:23.183461 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:56:23 crc kubenswrapper[4592]: E0929 17:56:23.184324 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:56:34 crc kubenswrapper[4592]: I0929 17:56:34.182812 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:56:34 crc kubenswrapper[4592]: E0929 17:56:34.183438 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:56:45 crc kubenswrapper[4592]: I0929 17:56:45.184969 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:56:45 crc kubenswrapper[4592]: E0929 17:56:45.185702 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:56:59 crc kubenswrapper[4592]: I0929 17:56:59.182827 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:56:59 crc kubenswrapper[4592]: E0929 17:56:59.183754 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:57:11 crc kubenswrapper[4592]: I0929 17:57:11.207843 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:57:11 crc kubenswrapper[4592]: E0929 17:57:11.208736 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:57:23 crc kubenswrapper[4592]: I0929 17:57:23.183513 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:57:23 crc kubenswrapper[4592]: E0929 17:57:23.185682 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.026547 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2ltnr"]
Sep 29 17:57:24 crc kubenswrapper[4592]: E0929 17:57:24.027786 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" containerName="registry-server"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.028033 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" containerName="registry-server"
Sep 29 17:57:24 crc kubenswrapper[4592]: E0929 17:57:24.028273 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" containerName="extract-content"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.028428 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" containerName="extract-content"
Sep 29 17:57:24 crc kubenswrapper[4592]: E0929 17:57:24.028574 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" containerName="extract-utilities"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.028690 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" containerName="extract-utilities"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.029227 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d74ae6e-7bf5-4672-94af-53ea0029bb8e" containerName="registry-server"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.035647 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.054177 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2ltnr"]
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.190394 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/741ba53a-2ff6-45f3-a553-1f696b34d3c6-utilities\") pod \"community-operators-2ltnr\" (UID: \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\") " pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.190471 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sf7x2\" (UniqueName: \"kubernetes.io/projected/741ba53a-2ff6-45f3-a553-1f696b34d3c6-kube-api-access-sf7x2\") pod \"community-operators-2ltnr\" (UID: \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\") " pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.190510 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/741ba53a-2ff6-45f3-a553-1f696b34d3c6-catalog-content\") pod \"community-operators-2ltnr\" (UID: \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\") " pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.292490 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/741ba53a-2ff6-45f3-a553-1f696b34d3c6-utilities\") pod \"community-operators-2ltnr\" (UID: \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\") " pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.292808 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sf7x2\" (UniqueName: \"kubernetes.io/projected/741ba53a-2ff6-45f3-a553-1f696b34d3c6-kube-api-access-sf7x2\") pod \"community-operators-2ltnr\" (UID: \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\") " pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.292958 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/741ba53a-2ff6-45f3-a553-1f696b34d3c6-catalog-content\") pod \"community-operators-2ltnr\" (UID: \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\") " pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.293111 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/741ba53a-2ff6-45f3-a553-1f696b34d3c6-utilities\") pod \"community-operators-2ltnr\" (UID: \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\") " pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.293327 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/741ba53a-2ff6-45f3-a553-1f696b34d3c6-catalog-content\") pod \"community-operators-2ltnr\" (UID: \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\") " pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.326791 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sf7x2\" (UniqueName: \"kubernetes.io/projected/741ba53a-2ff6-45f3-a553-1f696b34d3c6-kube-api-access-sf7x2\") pod \"community-operators-2ltnr\" (UID: \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\") " pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.367991 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:24 crc kubenswrapper[4592]: I0929 17:57:24.934892 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2ltnr"]
Sep 29 17:57:25 crc kubenswrapper[4592]: I0929 17:57:25.823620 4592 generic.go:334] "Generic (PLEG): container finished" podID="741ba53a-2ff6-45f3-a553-1f696b34d3c6" containerID="e4b0746242036f81707f8f4f14f680bcf19daa9c83aa1a869d0bb23dc2b70499" exitCode=0
Sep 29 17:57:25 crc kubenswrapper[4592]: I0929 17:57:25.823782 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ltnr" event={"ID":"741ba53a-2ff6-45f3-a553-1f696b34d3c6","Type":"ContainerDied","Data":"e4b0746242036f81707f8f4f14f680bcf19daa9c83aa1a869d0bb23dc2b70499"}
Sep 29 17:57:25 crc kubenswrapper[4592]: I0929 17:57:25.823992 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ltnr" event={"ID":"741ba53a-2ff6-45f3-a553-1f696b34d3c6","Type":"ContainerStarted","Data":"4eb1ea3bb9e0080c8e0623b2bc9b18bf72498ab97fa0e38bc3c4eada28699652"}
Sep 29 17:57:25 crc kubenswrapper[4592]: I0929 17:57:25.826036 4592 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 17:57:26 crc kubenswrapper[4592]: I0929 17:57:26.836205 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ltnr" event={"ID":"741ba53a-2ff6-45f3-a553-1f696b34d3c6","Type":"ContainerStarted","Data":"e99877879e5b468e3bce29d625cafb5f661e13b128d2e9723425885307ca4077"}
Sep 29 17:57:28 crc kubenswrapper[4592]: I0929 17:57:28.860016 4592 generic.go:334] "Generic (PLEG): container finished" podID="741ba53a-2ff6-45f3-a553-1f696b34d3c6" containerID="e99877879e5b468e3bce29d625cafb5f661e13b128d2e9723425885307ca4077" exitCode=0
Sep 29 17:57:28 crc kubenswrapper[4592]: I0929 17:57:28.860092 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ltnr" event={"ID":"741ba53a-2ff6-45f3-a553-1f696b34d3c6","Type":"ContainerDied","Data":"e99877879e5b468e3bce29d625cafb5f661e13b128d2e9723425885307ca4077"}
Sep 29 17:57:30 crc kubenswrapper[4592]: I0929 17:57:30.884817 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ltnr" event={"ID":"741ba53a-2ff6-45f3-a553-1f696b34d3c6","Type":"ContainerStarted","Data":"8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852"}
Sep 29 17:57:30 crc kubenswrapper[4592]: I0929 17:57:30.912740 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2ltnr" podStartSLOduration=4.432954991 podStartE2EDuration="7.912724253s" podCreationTimestamp="2025-09-29 17:57:23 +0000 UTC" firstStartedPulling="2025-09-29 17:57:25.82570324 +0000 UTC m=+3975.973480931" lastFinishedPulling="2025-09-29 17:57:29.305472512 +0000 UTC m=+3979.453250193" observedRunningTime="2025-09-29 17:57:30.911408646 +0000 UTC m=+3981.059186327" watchObservedRunningTime="2025-09-29 17:57:30.912724253 +0000 UTC m=+3981.060501934"
Sep 29 17:57:34 crc kubenswrapper[4592]: I0929 17:57:34.369003 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:34 crc kubenswrapper[4592]: I0929 17:57:34.369360 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:34 crc kubenswrapper[4592]: I0929 17:57:34.439557 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.187346 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:57:38 crc kubenswrapper[4592]: E0929 17:57:38.189077 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.499983 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fclrk"]
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.503813 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.516477 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fclrk"]
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.580156 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/effc1857-ae08-4e26-b577-546cf5770f8e-utilities\") pod \"redhat-operators-fclrk\" (UID: \"effc1857-ae08-4e26-b577-546cf5770f8e\") " pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.580294 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/effc1857-ae08-4e26-b577-546cf5770f8e-catalog-content\") pod \"redhat-operators-fclrk\" (UID: \"effc1857-ae08-4e26-b577-546cf5770f8e\") " pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.580435 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nk6d6\" (UniqueName: \"kubernetes.io/projected/effc1857-ae08-4e26-b577-546cf5770f8e-kube-api-access-nk6d6\") pod \"redhat-operators-fclrk\" (UID: \"effc1857-ae08-4e26-b577-546cf5770f8e\") " pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.688440 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/effc1857-ae08-4e26-b577-546cf5770f8e-utilities\") pod \"redhat-operators-fclrk\" (UID: \"effc1857-ae08-4e26-b577-546cf5770f8e\") " pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.690592 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/effc1857-ae08-4e26-b577-546cf5770f8e-catalog-content\") pod \"redhat-operators-fclrk\" (UID: \"effc1857-ae08-4e26-b577-546cf5770f8e\") " pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.692397 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nk6d6\" (UniqueName: \"kubernetes.io/projected/effc1857-ae08-4e26-b577-546cf5770f8e-kube-api-access-nk6d6\") pod \"redhat-operators-fclrk\" (UID: \"effc1857-ae08-4e26-b577-546cf5770f8e\") " pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.692515 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/effc1857-ae08-4e26-b577-546cf5770f8e-utilities\") pod \"redhat-operators-fclrk\" (UID: \"effc1857-ae08-4e26-b577-546cf5770f8e\") " pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.693084 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/effc1857-ae08-4e26-b577-546cf5770f8e-catalog-content\") pod \"redhat-operators-fclrk\" (UID: \"effc1857-ae08-4e26-b577-546cf5770f8e\") " pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.715341 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nk6d6\" (UniqueName: \"kubernetes.io/projected/effc1857-ae08-4e26-b577-546cf5770f8e-kube-api-access-nk6d6\") pod \"redhat-operators-fclrk\" (UID: \"effc1857-ae08-4e26-b577-546cf5770f8e\") " pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:38 crc kubenswrapper[4592]: I0929 17:57:38.850239 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:39 crc kubenswrapper[4592]: I0929 17:57:39.313017 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fclrk"]
Sep 29 17:57:39 crc kubenswrapper[4592]: I0929 17:57:39.976993 4592 generic.go:334] "Generic (PLEG): container finished" podID="effc1857-ae08-4e26-b577-546cf5770f8e" containerID="12ac4fb3df4579bde37451ea74396d4b768c48e126b52f58ecb29e37b3c06570" exitCode=0
Sep 29 17:57:39 crc kubenswrapper[4592]: I0929 17:57:39.977035 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fclrk" event={"ID":"effc1857-ae08-4e26-b577-546cf5770f8e","Type":"ContainerDied","Data":"12ac4fb3df4579bde37451ea74396d4b768c48e126b52f58ecb29e37b3c06570"}
Sep 29 17:57:39 crc kubenswrapper[4592]: I0929 17:57:39.977301 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fclrk" event={"ID":"effc1857-ae08-4e26-b577-546cf5770f8e","Type":"ContainerStarted","Data":"94c16c77aa02d77ceeba30cbb43b86e32f68c2891bfa937f69d3e1e37df088a8"}
Sep 29 17:57:41 crc kubenswrapper[4592]: I0929 17:57:41.997770 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fclrk" event={"ID":"effc1857-ae08-4e26-b577-546cf5770f8e","Type":"ContainerStarted","Data":"64332452b0a6b708199bedf8927487c45719c9834c47a442edad370e59e54777"}
Sep 29 17:57:44 crc kubenswrapper[4592]: I0929 17:57:44.445989 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:44 crc kubenswrapper[4592]: I0929 17:57:44.512550 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2ltnr"]
Sep 29 17:57:45 crc kubenswrapper[4592]: I0929 17:57:45.040461 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2ltnr" podUID="741ba53a-2ff6-45f3-a553-1f696b34d3c6" containerName="registry-server" containerID="cri-o://8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852" gracePeriod=2
Sep 29 17:57:45 crc kubenswrapper[4592]: I0929 17:57:45.694988 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:45 crc kubenswrapper[4592]: I0929 17:57:45.727838 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/741ba53a-2ff6-45f3-a553-1f696b34d3c6-catalog-content\") pod \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\" (UID: \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\") "
Sep 29 17:57:45 crc kubenswrapper[4592]: I0929 17:57:45.728002 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sf7x2\" (UniqueName: \"kubernetes.io/projected/741ba53a-2ff6-45f3-a553-1f696b34d3c6-kube-api-access-sf7x2\") pod \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\" (UID: \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\") "
Sep 29 17:57:45 crc kubenswrapper[4592]: I0929 17:57:45.728065 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/741ba53a-2ff6-45f3-a553-1f696b34d3c6-utilities\") pod \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\" (UID: \"741ba53a-2ff6-45f3-a553-1f696b34d3c6\") "
Sep 29 17:57:45 crc kubenswrapper[4592]: I0929 17:57:45.729002 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/741ba53a-2ff6-45f3-a553-1f696b34d3c6-utilities" (OuterVolumeSpecName: "utilities") pod "741ba53a-2ff6-45f3-a553-1f696b34d3c6" (UID: "741ba53a-2ff6-45f3-a553-1f696b34d3c6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:57:45 crc kubenswrapper[4592]: I0929 17:57:45.750052 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/741ba53a-2ff6-45f3-a553-1f696b34d3c6-kube-api-access-sf7x2" (OuterVolumeSpecName: "kube-api-access-sf7x2") pod "741ba53a-2ff6-45f3-a553-1f696b34d3c6" (UID: "741ba53a-2ff6-45f3-a553-1f696b34d3c6"). InnerVolumeSpecName "kube-api-access-sf7x2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:57:45 crc kubenswrapper[4592]: I0929 17:57:45.769299 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/741ba53a-2ff6-45f3-a553-1f696b34d3c6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "741ba53a-2ff6-45f3-a553-1f696b34d3c6" (UID: "741ba53a-2ff6-45f3-a553-1f696b34d3c6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:57:45 crc kubenswrapper[4592]: I0929 17:57:45.830833 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/741ba53a-2ff6-45f3-a553-1f696b34d3c6-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 17:57:45 crc kubenswrapper[4592]: I0929 17:57:45.831091 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sf7x2\" (UniqueName: \"kubernetes.io/projected/741ba53a-2ff6-45f3-a553-1f696b34d3c6-kube-api-access-sf7x2\") on node \"crc\" DevicePath \"\""
Sep 29 17:57:45 crc kubenswrapper[4592]: I0929 17:57:45.831191 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/741ba53a-2ff6-45f3-a553-1f696b34d3c6-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.056703 4592 generic.go:334] "Generic (PLEG): container finished" podID="741ba53a-2ff6-45f3-a553-1f696b34d3c6" containerID="8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852" exitCode=0
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.056806 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ltnr" event={"ID":"741ba53a-2ff6-45f3-a553-1f696b34d3c6","Type":"ContainerDied","Data":"8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852"}
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.056848 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2ltnr" event={"ID":"741ba53a-2ff6-45f3-a553-1f696b34d3c6","Type":"ContainerDied","Data":"4eb1ea3bb9e0080c8e0623b2bc9b18bf72498ab97fa0e38bc3c4eada28699652"}
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.056876 4592 scope.go:117] "RemoveContainer" containerID="8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852"
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.057081 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2ltnr"
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.064763 4592 generic.go:334] "Generic (PLEG): container finished" podID="effc1857-ae08-4e26-b577-546cf5770f8e" containerID="64332452b0a6b708199bedf8927487c45719c9834c47a442edad370e59e54777" exitCode=0
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.064806 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fclrk" event={"ID":"effc1857-ae08-4e26-b577-546cf5770f8e","Type":"ContainerDied","Data":"64332452b0a6b708199bedf8927487c45719c9834c47a442edad370e59e54777"}
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.095508 4592 scope.go:117] "RemoveContainer" containerID="e99877879e5b468e3bce29d625cafb5f661e13b128d2e9723425885307ca4077"
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.125647 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2ltnr"]
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.136991 4592 scope.go:117] "RemoveContainer" containerID="e4b0746242036f81707f8f4f14f680bcf19daa9c83aa1a869d0bb23dc2b70499"
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.142014 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2ltnr"]
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.178803 4592 scope.go:117] "RemoveContainer" containerID="8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852"
Sep 29 17:57:46 crc kubenswrapper[4592]: E0929 17:57:46.179248 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852\": container with ID starting with 8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852 not found: ID does not exist" containerID="8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852"
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.179279 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852"} err="failed to get container status \"8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852\": rpc error: code = NotFound desc = could not find container \"8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852\": container with ID starting with 8b46bc572ddeb3f7e2749441d19d9c43180f8aa14eec08ca9e58ca1e5e18f852 not found: ID does not exist"
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.179299 4592 scope.go:117] "RemoveContainer" containerID="e99877879e5b468e3bce29d625cafb5f661e13b128d2e9723425885307ca4077"
Sep 29 17:57:46 crc kubenswrapper[4592]: E0929 17:57:46.179633 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e99877879e5b468e3bce29d625cafb5f661e13b128d2e9723425885307ca4077\": container with ID starting with e99877879e5b468e3bce29d625cafb5f661e13b128d2e9723425885307ca4077 not found: ID does not exist" containerID="e99877879e5b468e3bce29d625cafb5f661e13b128d2e9723425885307ca4077"
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.179651 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e99877879e5b468e3bce29d625cafb5f661e13b128d2e9723425885307ca4077"} err="failed to get container status \"e99877879e5b468e3bce29d625cafb5f661e13b128d2e9723425885307ca4077\": rpc error: code = NotFound desc = could not find container \"e99877879e5b468e3bce29d625cafb5f661e13b128d2e9723425885307ca4077\": container with ID starting with e99877879e5b468e3bce29d625cafb5f661e13b128d2e9723425885307ca4077 not found: ID does not exist"
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.179663 4592 scope.go:117] "RemoveContainer" containerID="e4b0746242036f81707f8f4f14f680bcf19daa9c83aa1a869d0bb23dc2b70499"
Sep 29 17:57:46 crc kubenswrapper[4592]: E0929 17:57:46.180050 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4b0746242036f81707f8f4f14f680bcf19daa9c83aa1a869d0bb23dc2b70499\": container with ID starting with e4b0746242036f81707f8f4f14f680bcf19daa9c83aa1a869d0bb23dc2b70499 not found: ID does not exist" containerID="e4b0746242036f81707f8f4f14f680bcf19daa9c83aa1a869d0bb23dc2b70499"
Sep 29 17:57:46 crc kubenswrapper[4592]: I0929 17:57:46.180072 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4b0746242036f81707f8f4f14f680bcf19daa9c83aa1a869d0bb23dc2b70499"} err="failed to get container status \"e4b0746242036f81707f8f4f14f680bcf19daa9c83aa1a869d0bb23dc2b70499\": rpc error: code = NotFound desc = could not find container \"e4b0746242036f81707f8f4f14f680bcf19daa9c83aa1a869d0bb23dc2b70499\": container with ID starting with e4b0746242036f81707f8f4f14f680bcf19daa9c83aa1a869d0bb23dc2b70499 not found: ID does not exist"
Sep 29 17:57:47 crc kubenswrapper[4592]: I0929 17:57:47.080530 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fclrk" event={"ID":"effc1857-ae08-4e26-b577-546cf5770f8e","Type":"ContainerStarted","Data":"cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3"}
Sep 29 17:57:47 crc kubenswrapper[4592]: I0929 17:57:47.116885 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fclrk" podStartSLOduration=2.411500491 podStartE2EDuration="9.116864838s" podCreationTimestamp="2025-09-29 17:57:38 +0000 UTC" firstStartedPulling="2025-09-29 17:57:39.979097995 +0000 UTC m=+3990.126875676" lastFinishedPulling="2025-09-29 17:57:46.684462332 +0000 UTC m=+3996.832240023" observedRunningTime="2025-09-29 17:57:47.114531584 +0000 UTC m=+3997.262309265" watchObservedRunningTime="2025-09-29 17:57:47.116864838 +0000 UTC m=+3997.264642519"
Sep 29 17:57:47 crc kubenswrapper[4592]: I0929 17:57:47.195587 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="741ba53a-2ff6-45f3-a553-1f696b34d3c6" path="/var/lib/kubelet/pods/741ba53a-2ff6-45f3-a553-1f696b34d3c6/volumes"
Sep 29 17:57:48 crc kubenswrapper[4592]: I0929 17:57:48.851364 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:48 crc kubenswrapper[4592]: I0929 17:57:48.851790 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:49 crc kubenswrapper[4592]: I0929 17:57:49.910579 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fclrk" podUID="effc1857-ae08-4e26-b577-546cf5770f8e" containerName="registry-server" probeResult="failure" output=<
Sep 29 17:57:49 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s
Sep 29 17:57:49 crc kubenswrapper[4592]: >
Sep 29 17:57:50 crc kubenswrapper[4592]: I0929 17:57:50.183250 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:57:50 crc kubenswrapper[4592]: E0929 17:57:50.184079 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:57:58 crc kubenswrapper[4592]: I0929 17:57:58.916053 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:58 crc kubenswrapper[4592]: I0929 17:57:58.978118 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:57:59 crc kubenswrapper[4592]: I0929 17:57:59.169640 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fclrk"]
Sep 29 17:58:00 crc kubenswrapper[4592]: I0929 17:58:00.216770 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fclrk" podUID="effc1857-ae08-4e26-b577-546cf5770f8e" containerName="registry-server" containerID="cri-o://cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3" gracePeriod=2
Sep 29 17:58:00 crc kubenswrapper[4592]: I0929 17:58:00.691788 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:58:00 crc kubenswrapper[4592]: I0929 17:58:00.792067 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/effc1857-ae08-4e26-b577-546cf5770f8e-catalog-content\") pod \"effc1857-ae08-4e26-b577-546cf5770f8e\" (UID: \"effc1857-ae08-4e26-b577-546cf5770f8e\") "
Sep 29 17:58:00 crc kubenswrapper[4592]: I0929 17:58:00.798701 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nk6d6\" (UniqueName: \"kubernetes.io/projected/effc1857-ae08-4e26-b577-546cf5770f8e-kube-api-access-nk6d6\") pod \"effc1857-ae08-4e26-b577-546cf5770f8e\" (UID: \"effc1857-ae08-4e26-b577-546cf5770f8e\") "
Sep 29 17:58:00 crc kubenswrapper[4592]: I0929 17:58:00.798895 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/effc1857-ae08-4e26-b577-546cf5770f8e-utilities\") pod \"effc1857-ae08-4e26-b577-546cf5770f8e\" (UID: \"effc1857-ae08-4e26-b577-546cf5770f8e\") "
Sep 29 17:58:00 crc kubenswrapper[4592]: I0929 17:58:00.800398 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/effc1857-ae08-4e26-b577-546cf5770f8e-utilities" (OuterVolumeSpecName: "utilities") pod "effc1857-ae08-4e26-b577-546cf5770f8e" (UID: "effc1857-ae08-4e26-b577-546cf5770f8e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:58:00 crc kubenswrapper[4592]: I0929 17:58:00.806444 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/effc1857-ae08-4e26-b577-546cf5770f8e-kube-api-access-nk6d6" (OuterVolumeSpecName: "kube-api-access-nk6d6") pod "effc1857-ae08-4e26-b577-546cf5770f8e" (UID: "effc1857-ae08-4e26-b577-546cf5770f8e"). InnerVolumeSpecName "kube-api-access-nk6d6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 17:58:00 crc kubenswrapper[4592]: I0929 17:58:00.884884 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/effc1857-ae08-4e26-b577-546cf5770f8e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "effc1857-ae08-4e26-b577-546cf5770f8e" (UID: "effc1857-ae08-4e26-b577-546cf5770f8e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 17:58:00 crc kubenswrapper[4592]: I0929 17:58:00.901669 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nk6d6\" (UniqueName: \"kubernetes.io/projected/effc1857-ae08-4e26-b577-546cf5770f8e-kube-api-access-nk6d6\") on node \"crc\" DevicePath \"\""
Sep 29 17:58:00 crc kubenswrapper[4592]: I0929 17:58:00.901703 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/effc1857-ae08-4e26-b577-546cf5770f8e-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 17:58:00 crc kubenswrapper[4592]: I0929 17:58:00.901713 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/effc1857-ae08-4e26-b577-546cf5770f8e-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.226979 4592 generic.go:334] "Generic (PLEG): container finished" podID="effc1857-ae08-4e26-b577-546cf5770f8e" containerID="cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3" exitCode=0
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.227029 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fclrk" event={"ID":"effc1857-ae08-4e26-b577-546cf5770f8e","Type":"ContainerDied","Data":"cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3"}
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.227036 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fclrk"
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.227058 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fclrk" event={"ID":"effc1857-ae08-4e26-b577-546cf5770f8e","Type":"ContainerDied","Data":"94c16c77aa02d77ceeba30cbb43b86e32f68c2891bfa937f69d3e1e37df088a8"}
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.227079 4592 scope.go:117] "RemoveContainer" containerID="cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3"
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.261872 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fclrk"]
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.265846 4592 scope.go:117] "RemoveContainer" containerID="64332452b0a6b708199bedf8927487c45719c9834c47a442edad370e59e54777"
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.270578 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fclrk"]
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.308530 4592 scope.go:117] "RemoveContainer" containerID="12ac4fb3df4579bde37451ea74396d4b768c48e126b52f58ecb29e37b3c06570"
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.373269 4592 scope.go:117] "RemoveContainer" containerID="cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3"
Sep 29 17:58:01 crc kubenswrapper[4592]: E0929 17:58:01.373800 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3\": container with ID starting with cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3 not found: ID does not exist" containerID="cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3"
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.373843 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3"} err="failed to get container status \"cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3\": rpc error: code = NotFound desc = could not find container \"cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3\": container with ID starting with cf4ad78e206e19d8bf348c83c96087c8063b781829b790aa1e1b0585bd9261b3 not found: ID does not exist"
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.373880 4592 scope.go:117] "RemoveContainer" containerID="64332452b0a6b708199bedf8927487c45719c9834c47a442edad370e59e54777"
Sep 29 17:58:01 crc kubenswrapper[4592]: E0929 17:58:01.374311 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64332452b0a6b708199bedf8927487c45719c9834c47a442edad370e59e54777\": container with ID starting with 64332452b0a6b708199bedf8927487c45719c9834c47a442edad370e59e54777 not found: ID does not exist" containerID="64332452b0a6b708199bedf8927487c45719c9834c47a442edad370e59e54777"
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.374335 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64332452b0a6b708199bedf8927487c45719c9834c47a442edad370e59e54777"} err="failed to get container status \"64332452b0a6b708199bedf8927487c45719c9834c47a442edad370e59e54777\": rpc error: code = NotFound desc = could not find container \"64332452b0a6b708199bedf8927487c45719c9834c47a442edad370e59e54777\": container with ID starting with 64332452b0a6b708199bedf8927487c45719c9834c47a442edad370e59e54777 not found: ID does not exist"
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.374350 4592 scope.go:117] "RemoveContainer" containerID="12ac4fb3df4579bde37451ea74396d4b768c48e126b52f58ecb29e37b3c06570"
Sep 29 17:58:01 crc kubenswrapper[4592]: E0929 17:58:01.374636 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12ac4fb3df4579bde37451ea74396d4b768c48e126b52f58ecb29e37b3c06570\": container with ID starting with 12ac4fb3df4579bde37451ea74396d4b768c48e126b52f58ecb29e37b3c06570 not found: ID does not exist" containerID="12ac4fb3df4579bde37451ea74396d4b768c48e126b52f58ecb29e37b3c06570"
Sep 29 17:58:01 crc kubenswrapper[4592]: I0929 17:58:01.374680 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12ac4fb3df4579bde37451ea74396d4b768c48e126b52f58ecb29e37b3c06570"} err="failed to get container status \"12ac4fb3df4579bde37451ea74396d4b768c48e126b52f58ecb29e37b3c06570\": rpc error: code = NotFound desc = could not find container \"12ac4fb3df4579bde37451ea74396d4b768c48e126b52f58ecb29e37b3c06570\": container with ID starting with 12ac4fb3df4579bde37451ea74396d4b768c48e126b52f58ecb29e37b3c06570 not found: ID does not exist"
Sep 29 17:58:03 crc kubenswrapper[4592]: I0929 17:58:03.196186 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="effc1857-ae08-4e26-b577-546cf5770f8e" path="/var/lib/kubelet/pods/effc1857-ae08-4e26-b577-546cf5770f8e/volumes"
Sep 29 17:58:04 crc kubenswrapper[4592]: I0929 17:58:04.182933 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:58:04 crc kubenswrapper[4592]: E0929 17:58:04.183464 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:58:19 crc kubenswrapper[4592]: I0929 17:58:19.184247 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:58:19 crc kubenswrapper[4592]: E0929 17:58:19.185292 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:58:32 crc kubenswrapper[4592]: I0929 17:58:32.183358 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:58:32 crc kubenswrapper[4592]: E0929 17:58:32.184332 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:58:43 crc kubenswrapper[4592]: I0929 17:58:43.183204 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc"
Sep 29 17:58:43 crc kubenswrapper[4592]: E0929 17:58:43.184179 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.696540 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gk48g"]
Sep 29 17:58:49 crc kubenswrapper[4592]: E0929 17:58:49.697447 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741ba53a-2ff6-45f3-a553-1f696b34d3c6" containerName="extract-utilities"
Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.697461 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="741ba53a-2ff6-45f3-a553-1f696b34d3c6" containerName="extract-utilities"
Sep 29 17:58:49 crc kubenswrapper[4592]: E0929 17:58:49.697471 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="effc1857-ae08-4e26-b577-546cf5770f8e" containerName="registry-server"
Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.697478 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="effc1857-ae08-4e26-b577-546cf5770f8e" containerName="registry-server"
Sep 29 17:58:49 crc kubenswrapper[4592]: E0929 17:58:49.697495 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741ba53a-2ff6-45f3-a553-1f696b34d3c6" containerName="extract-content"
Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.697501 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="741ba53a-2ff6-45f3-a553-1f696b34d3c6" containerName="extract-content"
Sep 29 17:58:49 crc kubenswrapper[4592]: E0929 17:58:49.697516 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="effc1857-ae08-4e26-b577-546cf5770f8e" containerName="extract-utilities"
Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.697522 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="effc1857-ae08-4e26-b577-546cf5770f8e" containerName="extract-utilities"
Sep 29 17:58:49 crc kubenswrapper[4592]: E0929 17:58:49.697546 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="effc1857-ae08-4e26-b577-546cf5770f8e" containerName="extract-content"
Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.697552 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="effc1857-ae08-4e26-b577-546cf5770f8e" containerName="extract-content"
Sep 29 17:58:49 crc kubenswrapper[4592]: E0929 17:58:49.697566 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="741ba53a-2ff6-45f3-a553-1f696b34d3c6" containerName="registry-server"
Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.697571 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="741ba53a-2ff6-45f3-a553-1f696b34d3c6" containerName="registry-server"
Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.697765 4592 memory_manager.go:354]
"RemoveStaleState removing state" podUID="741ba53a-2ff6-45f3-a553-1f696b34d3c6" containerName="registry-server" Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.697789 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="effc1857-ae08-4e26-b577-546cf5770f8e" containerName="registry-server" Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.699101 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.723851 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk48g"] Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.886775 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bms5\" (UniqueName: \"kubernetes.io/projected/877c8e7b-06e6-4703-9bd5-a0e36b32175b-kube-api-access-7bms5\") pod \"redhat-marketplace-gk48g\" (UID: \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\") " pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.886839 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/877c8e7b-06e6-4703-9bd5-a0e36b32175b-utilities\") pod \"redhat-marketplace-gk48g\" (UID: \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\") " pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.886905 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/877c8e7b-06e6-4703-9bd5-a0e36b32175b-catalog-content\") pod \"redhat-marketplace-gk48g\" (UID: \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\") " pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.989252 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/877c8e7b-06e6-4703-9bd5-a0e36b32175b-catalog-content\") pod \"redhat-marketplace-gk48g\" (UID: \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\") " pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.989797 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/877c8e7b-06e6-4703-9bd5-a0e36b32175b-catalog-content\") pod \"redhat-marketplace-gk48g\" (UID: \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\") " pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.990132 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bms5\" (UniqueName: \"kubernetes.io/projected/877c8e7b-06e6-4703-9bd5-a0e36b32175b-kube-api-access-7bms5\") pod \"redhat-marketplace-gk48g\" (UID: \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\") " pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 17:58:49.990278 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/877c8e7b-06e6-4703-9bd5-a0e36b32175b-utilities\") pod \"redhat-marketplace-gk48g\" (UID: \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\") " pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:58:49 crc kubenswrapper[4592]: I0929 
17:58:49.990641 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/877c8e7b-06e6-4703-9bd5-a0e36b32175b-utilities\") pod \"redhat-marketplace-gk48g\" (UID: \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\") " pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:58:50 crc kubenswrapper[4592]: I0929 17:58:50.353565 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bms5\" (UniqueName: \"kubernetes.io/projected/877c8e7b-06e6-4703-9bd5-a0e36b32175b-kube-api-access-7bms5\") pod \"redhat-marketplace-gk48g\" (UID: \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\") " pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:58:50 crc kubenswrapper[4592]: I0929 17:58:50.631568 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.095975 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m4m85"] Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.106054 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.126361 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m4m85"] Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.180637 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk48g"] Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.217121 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj8jw\" (UniqueName: \"kubernetes.io/projected/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-kube-api-access-sj8jw\") pod \"certified-operators-m4m85\" (UID: \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\") " pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.217248 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-utilities\") pod \"certified-operators-m4m85\" (UID: \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\") " pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.217292 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-catalog-content\") pod \"certified-operators-m4m85\" (UID: \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\") " pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.318593 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj8jw\" (UniqueName: \"kubernetes.io/projected/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-kube-api-access-sj8jw\") pod \"certified-operators-m4m85\" (UID: \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\") " pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.318652 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-utilities\") pod 
\"certified-operators-m4m85\" (UID: \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\") " pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.318699 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-catalog-content\") pod \"certified-operators-m4m85\" (UID: \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\") " pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.320188 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-catalog-content\") pod \"certified-operators-m4m85\" (UID: \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\") " pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.320437 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-utilities\") pod \"certified-operators-m4m85\" (UID: \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\") " pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.339951 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj8jw\" (UniqueName: \"kubernetes.io/projected/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-kube-api-access-sj8jw\") pod \"certified-operators-m4m85\" (UID: \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\") " pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.447506 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.776554 4592 generic.go:334] "Generic (PLEG): container finished" podID="877c8e7b-06e6-4703-9bd5-a0e36b32175b" containerID="0ce0dba23e2e3935648e68114c7bdab721d4ec0362bf92bf66afa2a2850007d7" exitCode=0 Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.776594 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk48g" event={"ID":"877c8e7b-06e6-4703-9bd5-a0e36b32175b","Type":"ContainerDied","Data":"0ce0dba23e2e3935648e68114c7bdab721d4ec0362bf92bf66afa2a2850007d7"} Sep 29 17:58:51 crc kubenswrapper[4592]: I0929 17:58:51.776622 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk48g" event={"ID":"877c8e7b-06e6-4703-9bd5-a0e36b32175b","Type":"ContainerStarted","Data":"167ff174881e6a1fe5c393f0f3d61661e3e618d2d4cbde2fc2cb6e843ca46e52"} Sep 29 17:58:52 crc kubenswrapper[4592]: I0929 17:58:52.068430 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m4m85"] Sep 29 17:58:52 crc kubenswrapper[4592]: W0929 17:58:52.082264 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6dd8f4a_ff95_4a03_b0b4_752e2827c9b3.slice/crio-35e8e1d2393b3f5c42be5c3c8cb8dbf0ba6318ab9c0b73e004d448719331b814 WatchSource:0}: Error finding container 35e8e1d2393b3f5c42be5c3c8cb8dbf0ba6318ab9c0b73e004d448719331b814: Status 404 returned error can't find the container with id 35e8e1d2393b3f5c42be5c3c8cb8dbf0ba6318ab9c0b73e004d448719331b814 Sep 29 17:58:52 crc kubenswrapper[4592]: I0929 17:58:52.788186 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk48g" event={"ID":"877c8e7b-06e6-4703-9bd5-a0e36b32175b","Type":"ContainerStarted","Data":"7f43f94ac1e24b16e932960200e744be02570d8d4ecd420277334615455923f5"} Sep 29 17:58:52 crc kubenswrapper[4592]: I0929 17:58:52.790892 4592 generic.go:334] "Generic (PLEG): container finished" podID="e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" containerID="954051b86dee855e0d57337f64ab55ddd71852a0657bd4e1f3ee9fbbb28d9c8f" exitCode=0 Sep 29 17:58:52 crc kubenswrapper[4592]: I0929 17:58:52.790936 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4m85" event={"ID":"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3","Type":"ContainerDied","Data":"954051b86dee855e0d57337f64ab55ddd71852a0657bd4e1f3ee9fbbb28d9c8f"} Sep 29 17:58:52 crc kubenswrapper[4592]: I0929 17:58:52.790966 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4m85" event={"ID":"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3","Type":"ContainerStarted","Data":"35e8e1d2393b3f5c42be5c3c8cb8dbf0ba6318ab9c0b73e004d448719331b814"} Sep 29 17:58:53 crc kubenswrapper[4592]: I0929 17:58:53.800048 4592 generic.go:334] "Generic (PLEG): container finished" podID="877c8e7b-06e6-4703-9bd5-a0e36b32175b" containerID="7f43f94ac1e24b16e932960200e744be02570d8d4ecd420277334615455923f5" exitCode=0 Sep 29 17:58:53 crc kubenswrapper[4592]: I0929 17:58:53.800176 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk48g" event={"ID":"877c8e7b-06e6-4703-9bd5-a0e36b32175b","Type":"ContainerDied","Data":"7f43f94ac1e24b16e932960200e744be02570d8d4ecd420277334615455923f5"} Sep 29 17:58:54 crc kubenswrapper[4592]: I0929 17:58:54.811481 
4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk48g" event={"ID":"877c8e7b-06e6-4703-9bd5-a0e36b32175b","Type":"ContainerStarted","Data":"31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4"} Sep 29 17:58:54 crc kubenswrapper[4592]: I0929 17:58:54.814496 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4m85" event={"ID":"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3","Type":"ContainerStarted","Data":"3287033e002f1380510a72845df72d0d7b3a5e6b36b67f0ad0b9a60df0d4966e"} Sep 29 17:58:54 crc kubenswrapper[4592]: I0929 17:58:54.840655 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gk48g" podStartSLOduration=3.380887092 podStartE2EDuration="5.84063651s" podCreationTimestamp="2025-09-29 17:58:49 +0000 UTC" firstStartedPulling="2025-09-29 17:58:51.778387729 +0000 UTC m=+4061.926165410" lastFinishedPulling="2025-09-29 17:58:54.238137147 +0000 UTC m=+4064.385914828" observedRunningTime="2025-09-29 17:58:54.831873651 +0000 UTC m=+4064.979651332" watchObservedRunningTime="2025-09-29 17:58:54.84063651 +0000 UTC m=+4064.988414191" Sep 29 17:58:55 crc kubenswrapper[4592]: I0929 17:58:55.824838 4592 generic.go:334] "Generic (PLEG): container finished" podID="e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" containerID="3287033e002f1380510a72845df72d0d7b3a5e6b36b67f0ad0b9a60df0d4966e" exitCode=0 Sep 29 17:58:55 crc kubenswrapper[4592]: I0929 17:58:55.824892 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4m85" event={"ID":"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3","Type":"ContainerDied","Data":"3287033e002f1380510a72845df72d0d7b3a5e6b36b67f0ad0b9a60df0d4966e"} Sep 29 17:58:56 crc kubenswrapper[4592]: I0929 17:58:56.835646 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4m85" event={"ID":"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3","Type":"ContainerStarted","Data":"ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392"} Sep 29 17:58:58 crc kubenswrapper[4592]: I0929 17:58:58.183035 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc" Sep 29 17:58:58 crc kubenswrapper[4592]: E0929 17:58:58.183663 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:59:00 crc kubenswrapper[4592]: I0929 17:59:00.632345 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:59:00 crc kubenswrapper[4592]: I0929 17:59:00.632760 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:59:00 crc kubenswrapper[4592]: I0929 17:59:00.685127 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:59:00 crc kubenswrapper[4592]: I0929 17:59:00.710520 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m4m85" 
podStartSLOduration=6.300515012 podStartE2EDuration="9.710497942s" podCreationTimestamp="2025-09-29 17:58:51 +0000 UTC" firstStartedPulling="2025-09-29 17:58:52.793282643 +0000 UTC m=+4062.941060324" lastFinishedPulling="2025-09-29 17:58:56.203265573 +0000 UTC m=+4066.351043254" observedRunningTime="2025-09-29 17:58:56.866364838 +0000 UTC m=+4067.014142519" watchObservedRunningTime="2025-09-29 17:59:00.710497942 +0000 UTC m=+4070.858275623" Sep 29 17:59:00 crc kubenswrapper[4592]: I0929 17:59:00.923962 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:59:01 crc kubenswrapper[4592]: I0929 17:59:01.087496 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk48g"] Sep 29 17:59:01 crc kubenswrapper[4592]: I0929 17:59:01.447937 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:59:01 crc kubenswrapper[4592]: I0929 17:59:01.448167 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:59:01 crc kubenswrapper[4592]: I0929 17:59:01.507256 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:59:01 crc kubenswrapper[4592]: I0929 17:59:01.958068 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:59:02 crc kubenswrapper[4592]: I0929 17:59:02.894094 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gk48g" podUID="877c8e7b-06e6-4703-9bd5-a0e36b32175b" containerName="registry-server" containerID="cri-o://31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4" gracePeriod=2 Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.397404 4592 util.go:48] "No ready sandbox for pod can be found. 
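The two pod_startup_latency_tracker entries above are internally consistent: podStartSLOduration equals podStartE2EDuration minus the image-pull window (lastFinishedPulling minus firstStartedPulling), since the kubelet excludes pull time from the startup SLO measurement. Checking the logged numbers in Go (seconds fields only, as each pull window stays within a single minute):

    package main

    import "fmt"

    func main() {
        // redhat-marketplace-gk48g: E2E 5.84063651s, pulls 17:58:51.778387729 -> 17:58:54.238137147
        fmt.Println(5.84063651 - (54.238137147 - 51.778387729)) // ~3.380887092, the logged podStartSLOduration

        // certified-operators-m4m85: E2E 9.710497942s, pulls 17:58:52.793282643 -> 17:58:56.203265573
        fmt.Println(9.710497942 - (56.203265573 - 52.793282643)) // ~6.300515012, the logged podStartSLOduration
    }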
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.489428 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m4m85"] Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.493137 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bms5\" (UniqueName: \"kubernetes.io/projected/877c8e7b-06e6-4703-9bd5-a0e36b32175b-kube-api-access-7bms5\") pod \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\" (UID: \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\") " Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.493276 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/877c8e7b-06e6-4703-9bd5-a0e36b32175b-utilities\") pod \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\" (UID: \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\") " Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.493408 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/877c8e7b-06e6-4703-9bd5-a0e36b32175b-catalog-content\") pod \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\" (UID: \"877c8e7b-06e6-4703-9bd5-a0e36b32175b\") " Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.495470 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/877c8e7b-06e6-4703-9bd5-a0e36b32175b-utilities" (OuterVolumeSpecName: "utilities") pod "877c8e7b-06e6-4703-9bd5-a0e36b32175b" (UID: "877c8e7b-06e6-4703-9bd5-a0e36b32175b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.512079 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/877c8e7b-06e6-4703-9bd5-a0e36b32175b-kube-api-access-7bms5" (OuterVolumeSpecName: "kube-api-access-7bms5") pod "877c8e7b-06e6-4703-9bd5-a0e36b32175b" (UID: "877c8e7b-06e6-4703-9bd5-a0e36b32175b"). InnerVolumeSpecName "kube-api-access-7bms5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.518636 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/877c8e7b-06e6-4703-9bd5-a0e36b32175b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "877c8e7b-06e6-4703-9bd5-a0e36b32175b" (UID: "877c8e7b-06e6-4703-9bd5-a0e36b32175b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.595476 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/877c8e7b-06e6-4703-9bd5-a0e36b32175b-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.595521 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bms5\" (UniqueName: \"kubernetes.io/projected/877c8e7b-06e6-4703-9bd5-a0e36b32175b-kube-api-access-7bms5\") on node \"crc\" DevicePath \"\"" Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.595536 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/877c8e7b-06e6-4703-9bd5-a0e36b32175b-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.906087 4592 generic.go:334] "Generic (PLEG): container finished" podID="877c8e7b-06e6-4703-9bd5-a0e36b32175b" containerID="31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4" exitCode=0 Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.906216 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gk48g" Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.906221 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk48g" event={"ID":"877c8e7b-06e6-4703-9bd5-a0e36b32175b","Type":"ContainerDied","Data":"31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4"} Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.907097 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gk48g" event={"ID":"877c8e7b-06e6-4703-9bd5-a0e36b32175b","Type":"ContainerDied","Data":"167ff174881e6a1fe5c393f0f3d61661e3e618d2d4cbde2fc2cb6e843ca46e52"} Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.907174 4592 scope.go:117] "RemoveContainer" containerID="31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4" Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.907566 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m4m85" podUID="e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" containerName="registry-server" containerID="cri-o://ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392" gracePeriod=2 Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.961436 4592 scope.go:117] "RemoveContainer" containerID="7f43f94ac1e24b16e932960200e744be02570d8d4ecd420277334615455923f5" Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.967367 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk48g"] Sep 29 17:59:03 crc kubenswrapper[4592]: I0929 17:59:03.981237 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gk48g"] Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.143549 4592 scope.go:117] "RemoveContainer" containerID="0ce0dba23e2e3935648e68114c7bdab721d4ec0362bf92bf66afa2a2850007d7" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.182689 4592 scope.go:117] "RemoveContainer" containerID="31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4" Sep 29 17:59:04 crc kubenswrapper[4592]: E0929 17:59:04.184345 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4\": container with ID starting with 31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4 not found: ID does not exist" containerID="31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.184402 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4"} err="failed to get container status \"31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4\": rpc error: code = NotFound desc = could not find container \"31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4\": container with ID starting with 31917f80b856136d995252fcf17aa950dcae1169e38a4a77245779aac01449f4 not found: ID does not exist" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.184424 4592 scope.go:117] "RemoveContainer" containerID="7f43f94ac1e24b16e932960200e744be02570d8d4ecd420277334615455923f5" Sep 29 17:59:04 crc kubenswrapper[4592]: E0929 17:59:04.185172 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f43f94ac1e24b16e932960200e744be02570d8d4ecd420277334615455923f5\": container with ID starting with 7f43f94ac1e24b16e932960200e744be02570d8d4ecd420277334615455923f5 not found: ID does not exist" containerID="7f43f94ac1e24b16e932960200e744be02570d8d4ecd420277334615455923f5" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.185195 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f43f94ac1e24b16e932960200e744be02570d8d4ecd420277334615455923f5"} err="failed to get container status \"7f43f94ac1e24b16e932960200e744be02570d8d4ecd420277334615455923f5\": rpc error: code = NotFound desc = could not find container \"7f43f94ac1e24b16e932960200e744be02570d8d4ecd420277334615455923f5\": container with ID starting with 7f43f94ac1e24b16e932960200e744be02570d8d4ecd420277334615455923f5 not found: ID does not exist" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.185212 4592 scope.go:117] "RemoveContainer" containerID="0ce0dba23e2e3935648e68114c7bdab721d4ec0362bf92bf66afa2a2850007d7" Sep 29 17:59:04 crc kubenswrapper[4592]: E0929 17:59:04.186480 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ce0dba23e2e3935648e68114c7bdab721d4ec0362bf92bf66afa2a2850007d7\": container with ID starting with 0ce0dba23e2e3935648e68114c7bdab721d4ec0362bf92bf66afa2a2850007d7 not found: ID does not exist" containerID="0ce0dba23e2e3935648e68114c7bdab721d4ec0362bf92bf66afa2a2850007d7" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.186510 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ce0dba23e2e3935648e68114c7bdab721d4ec0362bf92bf66afa2a2850007d7"} err="failed to get container status \"0ce0dba23e2e3935648e68114c7bdab721d4ec0362bf92bf66afa2a2850007d7\": rpc error: code = NotFound desc = could not find container \"0ce0dba23e2e3935648e68114c7bdab721d4ec0362bf92bf66afa2a2850007d7\": container with ID starting with 0ce0dba23e2e3935648e68114c7bdab721d4ec0362bf92bf66afa2a2850007d7 not found: ID does not exist" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.463331 4592 util.go:48] "No ready sandbox for pod can be found. 
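The RemoveContainer / NotFound pairs above (scope.go issuing a delete, then log.go reporting "could not find container") are duplicate cleanup passes racing the sandbox teardown; the kubelet logs the NotFound result and moves on. Cleanup code against an API like this is normally written to treat NotFound as success. A generic stdlib-only sketch of that pattern, not kubelet's actual types:

    package main

    import (
        "errors"
        "fmt"
    )

    // errNotFound stands in for the CRI "rpc error: code = NotFound" seen above.
    var errNotFound = errors.New("not found")

    // removeIfPresent deletes a container but treats "already gone" as success,
    // which keeps repeated cleanup passes idempotent.
    func removeIfPresent(remove func(id string) error, id string) error {
        err := remove(id)
        if err == nil || errors.Is(err, errNotFound) {
            return nil
        }
        return fmt.Errorf("remove container %s: %w", id, err)
    }

    func main() {
        gone := func(id string) error {
            return fmt.Errorf("could not find container %q: %w", id, errNotFound)
        }
        fmt.Println(removeIfPresent(gone, "cf4ad78e206e")) // <nil>: NotFound tolerated
    }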
Need to start a new one" pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.555237 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-utilities\") pod \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\" (UID: \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\") " Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.555327 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sj8jw\" (UniqueName: \"kubernetes.io/projected/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-kube-api-access-sj8jw\") pod \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\" (UID: \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\") " Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.555373 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-catalog-content\") pod \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\" (UID: \"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3\") " Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.558823 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-utilities" (OuterVolumeSpecName: "utilities") pod "e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" (UID: "e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.561910 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-kube-api-access-sj8jw" (OuterVolumeSpecName: "kube-api-access-sj8jw") pod "e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" (UID: "e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3"). InnerVolumeSpecName "kube-api-access-sj8jw". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.598009 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" (UID: "e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.658229 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.658258 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sj8jw\" (UniqueName: \"kubernetes.io/projected/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-kube-api-access-sj8jw\") on node \"crc\" DevicePath \"\"" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.658268 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.940306 4592 generic.go:334] "Generic (PLEG): container finished" podID="e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" containerID="ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392" exitCode=0 Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.940375 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4m85" event={"ID":"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3","Type":"ContainerDied","Data":"ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392"} Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.940390 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m4m85" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.940425 4592 scope.go:117] "RemoveContainer" containerID="ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.940410 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4m85" event={"ID":"e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3","Type":"ContainerDied","Data":"35e8e1d2393b3f5c42be5c3c8cb8dbf0ba6318ab9c0b73e004d448719331b814"} Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.987423 4592 scope.go:117] "RemoveContainer" containerID="3287033e002f1380510a72845df72d0d7b3a5e6b36b67f0ad0b9a60df0d4966e" Sep 29 17:59:04 crc kubenswrapper[4592]: I0929 17:59:04.990969 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m4m85"] Sep 29 17:59:05 crc kubenswrapper[4592]: I0929 17:59:05.015670 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m4m85"] Sep 29 17:59:05 crc kubenswrapper[4592]: I0929 17:59:05.029110 4592 scope.go:117] "RemoveContainer" containerID="954051b86dee855e0d57337f64ab55ddd71852a0657bd4e1f3ee9fbbb28d9c8f" Sep 29 17:59:05 crc kubenswrapper[4592]: I0929 17:59:05.088333 4592 scope.go:117] "RemoveContainer" containerID="ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392" Sep 29 17:59:05 crc kubenswrapper[4592]: E0929 17:59:05.088734 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392\": container with ID starting with ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392 not found: ID does not exist" containerID="ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392" Sep 29 17:59:05 crc kubenswrapper[4592]: I0929 17:59:05.088780 
4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392"} err="failed to get container status \"ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392\": rpc error: code = NotFound desc = could not find container \"ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392\": container with ID starting with ddbeba004ee0d998eb22b92c68cf2994ef2898e91f1ce52a9e49ecb1a7f01392 not found: ID does not exist" Sep 29 17:59:05 crc kubenswrapper[4592]: I0929 17:59:05.088814 4592 scope.go:117] "RemoveContainer" containerID="3287033e002f1380510a72845df72d0d7b3a5e6b36b67f0ad0b9a60df0d4966e" Sep 29 17:59:05 crc kubenswrapper[4592]: E0929 17:59:05.089766 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3287033e002f1380510a72845df72d0d7b3a5e6b36b67f0ad0b9a60df0d4966e\": container with ID starting with 3287033e002f1380510a72845df72d0d7b3a5e6b36b67f0ad0b9a60df0d4966e not found: ID does not exist" containerID="3287033e002f1380510a72845df72d0d7b3a5e6b36b67f0ad0b9a60df0d4966e" Sep 29 17:59:05 crc kubenswrapper[4592]: I0929 17:59:05.089799 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3287033e002f1380510a72845df72d0d7b3a5e6b36b67f0ad0b9a60df0d4966e"} err="failed to get container status \"3287033e002f1380510a72845df72d0d7b3a5e6b36b67f0ad0b9a60df0d4966e\": rpc error: code = NotFound desc = could not find container \"3287033e002f1380510a72845df72d0d7b3a5e6b36b67f0ad0b9a60df0d4966e\": container with ID starting with 3287033e002f1380510a72845df72d0d7b3a5e6b36b67f0ad0b9a60df0d4966e not found: ID does not exist" Sep 29 17:59:05 crc kubenswrapper[4592]: I0929 17:59:05.089826 4592 scope.go:117] "RemoveContainer" containerID="954051b86dee855e0d57337f64ab55ddd71852a0657bd4e1f3ee9fbbb28d9c8f" Sep 29 17:59:05 crc kubenswrapper[4592]: E0929 17:59:05.090114 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"954051b86dee855e0d57337f64ab55ddd71852a0657bd4e1f3ee9fbbb28d9c8f\": container with ID starting with 954051b86dee855e0d57337f64ab55ddd71852a0657bd4e1f3ee9fbbb28d9c8f not found: ID does not exist" containerID="954051b86dee855e0d57337f64ab55ddd71852a0657bd4e1f3ee9fbbb28d9c8f" Sep 29 17:59:05 crc kubenswrapper[4592]: I0929 17:59:05.090167 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"954051b86dee855e0d57337f64ab55ddd71852a0657bd4e1f3ee9fbbb28d9c8f"} err="failed to get container status \"954051b86dee855e0d57337f64ab55ddd71852a0657bd4e1f3ee9fbbb28d9c8f\": rpc error: code = NotFound desc = could not find container \"954051b86dee855e0d57337f64ab55ddd71852a0657bd4e1f3ee9fbbb28d9c8f\": container with ID starting with 954051b86dee855e0d57337f64ab55ddd71852a0657bd4e1f3ee9fbbb28d9c8f not found: ID does not exist" Sep 29 17:59:05 crc kubenswrapper[4592]: I0929 17:59:05.195003 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="877c8e7b-06e6-4703-9bd5-a0e36b32175b" path="/var/lib/kubelet/pods/877c8e7b-06e6-4703-9bd5-a0e36b32175b/volumes" Sep 29 17:59:05 crc kubenswrapper[4592]: I0929 17:59:05.195678 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" path="/var/lib/kubelet/pods/e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3/volumes" Sep 29 17:59:12 crc kubenswrapper[4592]: I0929 
17:59:12.183639 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc" Sep 29 17:59:12 crc kubenswrapper[4592]: E0929 17:59:12.184252 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:59:23 crc kubenswrapper[4592]: I0929 17:59:23.183486 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc" Sep 29 17:59:23 crc kubenswrapper[4592]: E0929 17:59:23.185052 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:59:36 crc kubenswrapper[4592]: I0929 17:59:36.183203 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc" Sep 29 17:59:36 crc kubenswrapper[4592]: E0929 17:59:36.184040 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 17:59:50 crc kubenswrapper[4592]: I0929 17:59:50.184684 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc" Sep 29 17:59:50 crc kubenswrapper[4592]: E0929 17:59:50.185447 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.164977 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr"] Sep 29 18:00:00 crc kubenswrapper[4592]: E0929 18:00:00.166266 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="877c8e7b-06e6-4703-9bd5-a0e36b32175b" containerName="extract-content" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.166290 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="877c8e7b-06e6-4703-9bd5-a0e36b32175b" containerName="extract-content" Sep 29 18:00:00 crc kubenswrapper[4592]: E0929 18:00:00.166317 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" containerName="registry-server" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.166329 4592 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" containerName="registry-server" Sep 29 18:00:00 crc kubenswrapper[4592]: E0929 18:00:00.166359 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="877c8e7b-06e6-4703-9bd5-a0e36b32175b" containerName="extract-utilities" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.166371 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="877c8e7b-06e6-4703-9bd5-a0e36b32175b" containerName="extract-utilities" Sep 29 18:00:00 crc kubenswrapper[4592]: E0929 18:00:00.166393 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" containerName="extract-utilities" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.166403 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" containerName="extract-utilities" Sep 29 18:00:00 crc kubenswrapper[4592]: E0929 18:00:00.166435 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="877c8e7b-06e6-4703-9bd5-a0e36b32175b" containerName="registry-server" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.166446 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="877c8e7b-06e6-4703-9bd5-a0e36b32175b" containerName="registry-server" Sep 29 18:00:00 crc kubenswrapper[4592]: E0929 18:00:00.166461 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" containerName="extract-content" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.166473 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" containerName="extract-content" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.166774 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6dd8f4a-ff95-4a03-b0b4-752e2827c9b3" containerName="registry-server" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.166797 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="877c8e7b-06e6-4703-9bd5-a0e36b32175b" containerName="registry-server" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.167830 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.170490 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.181760 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.195859 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr"] Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.229898 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/79cb8390-3965-4d2a-a032-a528face5588-config-volume\") pod \"collect-profiles-29319480-5jjzr\" (UID: \"79cb8390-3965-4d2a-a032-a528face5588\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.230135 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/79cb8390-3965-4d2a-a032-a528face5588-secret-volume\") pod \"collect-profiles-29319480-5jjzr\" (UID: \"79cb8390-3965-4d2a-a032-a528face5588\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.230224 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d82x\" (UniqueName: \"kubernetes.io/projected/79cb8390-3965-4d2a-a032-a528face5588-kube-api-access-4d82x\") pod \"collect-profiles-29319480-5jjzr\" (UID: \"79cb8390-3965-4d2a-a032-a528face5588\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.331619 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/79cb8390-3965-4d2a-a032-a528face5588-config-volume\") pod \"collect-profiles-29319480-5jjzr\" (UID: \"79cb8390-3965-4d2a-a032-a528face5588\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.331703 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/79cb8390-3965-4d2a-a032-a528face5588-secret-volume\") pod \"collect-profiles-29319480-5jjzr\" (UID: \"79cb8390-3965-4d2a-a032-a528face5588\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.331732 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d82x\" (UniqueName: \"kubernetes.io/projected/79cb8390-3965-4d2a-a032-a528face5588-kube-api-access-4d82x\") pod \"collect-profiles-29319480-5jjzr\" (UID: \"79cb8390-3965-4d2a-a032-a528face5588\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.332581 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/79cb8390-3965-4d2a-a032-a528face5588-config-volume\") pod 
\"collect-profiles-29319480-5jjzr\" (UID: \"79cb8390-3965-4d2a-a032-a528face5588\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.336896 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/79cb8390-3965-4d2a-a032-a528face5588-secret-volume\") pod \"collect-profiles-29319480-5jjzr\" (UID: \"79cb8390-3965-4d2a-a032-a528face5588\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.348568 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d82x\" (UniqueName: \"kubernetes.io/projected/79cb8390-3965-4d2a-a032-a528face5588-kube-api-access-4d82x\") pod \"collect-profiles-29319480-5jjzr\" (UID: \"79cb8390-3965-4d2a-a032-a528face5588\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.499489 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:00 crc kubenswrapper[4592]: I0929 18:00:00.972527 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr"] Sep 29 18:00:01 crc kubenswrapper[4592]: I0929 18:00:01.193121 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc" Sep 29 18:00:01 crc kubenswrapper[4592]: E0929 18:00:01.193584 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:00:01 crc kubenswrapper[4592]: I0929 18:00:01.566098 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" event={"ID":"79cb8390-3965-4d2a-a032-a528face5588","Type":"ContainerStarted","Data":"ff8ef64c56a2a661745f0f08ae92cf96badc561229eb9b7a1a356e1897b8c4d3"} Sep 29 18:00:01 crc kubenswrapper[4592]: I0929 18:00:01.566352 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" event={"ID":"79cb8390-3965-4d2a-a032-a528face5588","Type":"ContainerStarted","Data":"c1bb1c76e79f6eb10e8ca0e9e7c28e15c98576a2b37425ef63bfd2464220b86d"} Sep 29 18:00:01 crc kubenswrapper[4592]: I0929 18:00:01.586113 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" podStartSLOduration=1.586093346 podStartE2EDuration="1.586093346s" podCreationTimestamp="2025-09-29 18:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:00:01.585321415 +0000 UTC m=+4131.733099096" watchObservedRunningTime="2025-09-29 18:00:01.586093346 +0000 UTC m=+4131.733871027" Sep 29 18:00:02 crc kubenswrapper[4592]: I0929 18:00:02.579760 4592 generic.go:334] "Generic (PLEG): container finished" podID="79cb8390-3965-4d2a-a032-a528face5588" 
containerID="ff8ef64c56a2a661745f0f08ae92cf96badc561229eb9b7a1a356e1897b8c4d3" exitCode=0 Sep 29 18:00:02 crc kubenswrapper[4592]: I0929 18:00:02.579813 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" event={"ID":"79cb8390-3965-4d2a-a032-a528face5588","Type":"ContainerDied","Data":"ff8ef64c56a2a661745f0f08ae92cf96badc561229eb9b7a1a356e1897b8c4d3"} Sep 29 18:00:03 crc kubenswrapper[4592]: I0929 18:00:03.992455 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.008755 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d82x\" (UniqueName: \"kubernetes.io/projected/79cb8390-3965-4d2a-a032-a528face5588-kube-api-access-4d82x\") pod \"79cb8390-3965-4d2a-a032-a528face5588\" (UID: \"79cb8390-3965-4d2a-a032-a528face5588\") " Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.008829 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/79cb8390-3965-4d2a-a032-a528face5588-config-volume\") pod \"79cb8390-3965-4d2a-a032-a528face5588\" (UID: \"79cb8390-3965-4d2a-a032-a528face5588\") " Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.008872 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/79cb8390-3965-4d2a-a032-a528face5588-secret-volume\") pod \"79cb8390-3965-4d2a-a032-a528face5588\" (UID: \"79cb8390-3965-4d2a-a032-a528face5588\") " Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.009444 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79cb8390-3965-4d2a-a032-a528face5588-config-volume" (OuterVolumeSpecName: "config-volume") pod "79cb8390-3965-4d2a-a032-a528face5588" (UID: "79cb8390-3965-4d2a-a032-a528face5588"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.016373 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79cb8390-3965-4d2a-a032-a528face5588-kube-api-access-4d82x" (OuterVolumeSpecName: "kube-api-access-4d82x") pod "79cb8390-3965-4d2a-a032-a528face5588" (UID: "79cb8390-3965-4d2a-a032-a528face5588"). InnerVolumeSpecName "kube-api-access-4d82x". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.016549 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79cb8390-3965-4d2a-a032-a528face5588-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "79cb8390-3965-4d2a-a032-a528face5588" (UID: "79cb8390-3965-4d2a-a032-a528face5588"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.111653 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d82x\" (UniqueName: \"kubernetes.io/projected/79cb8390-3965-4d2a-a032-a528face5588-kube-api-access-4d82x\") on node \"crc\" DevicePath \"\"" Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.111693 4592 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/79cb8390-3965-4d2a-a032-a528face5588-config-volume\") on node \"crc\" DevicePath \"\"" Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.111706 4592 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/79cb8390-3965-4d2a-a032-a528face5588-secret-volume\") on node \"crc\" DevicePath \"\"" Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.300433 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq"] Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.307250 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319435-jb2fq"] Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.598667 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" event={"ID":"79cb8390-3965-4d2a-a032-a528face5588","Type":"ContainerDied","Data":"c1bb1c76e79f6eb10e8ca0e9e7c28e15c98576a2b37425ef63bfd2464220b86d"} Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.598702 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1bb1c76e79f6eb10e8ca0e9e7c28e15c98576a2b37425ef63bfd2464220b86d" Sep 29 18:00:04 crc kubenswrapper[4592]: I0929 18:00:04.598750 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319480-5jjzr" Sep 29 18:00:05 crc kubenswrapper[4592]: I0929 18:00:05.200938 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81f64e86-1b9e-4c06-8020-5e7d3c488c01" path="/var/lib/kubelet/pods/81f64e86-1b9e-4c06-8020-5e7d3c488c01/volumes" Sep 29 18:00:14 crc kubenswrapper[4592]: I0929 18:00:14.183019 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc" Sep 29 18:00:14 crc kubenswrapper[4592]: E0929 18:00:14.183665 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:00:22 crc kubenswrapper[4592]: I0929 18:00:22.964672 4592 scope.go:117] "RemoveContainer" containerID="5dc15646fe904fee748442c867975a622f8b3209bb00cec68cba66171511f0e4" Sep 29 18:00:26 crc kubenswrapper[4592]: I0929 18:00:26.183202 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc" Sep 29 18:00:26 crc kubenswrapper[4592]: E0929 18:00:26.183685 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:00:41 crc kubenswrapper[4592]: I0929 18:00:41.193788 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc" Sep 29 18:00:41 crc kubenswrapper[4592]: I0929 18:00:41.948119 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"c4fe199ac0cc63fa88e38c75c6ca4ca2e4911d1e135fbfe55830bf9433e7efac"} Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.163233 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29319481-pq7gv"] Sep 29 18:01:00 crc kubenswrapper[4592]: E0929 18:01:00.164089 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79cb8390-3965-4d2a-a032-a528face5588" containerName="collect-profiles" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.164101 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="79cb8390-3965-4d2a-a032-a528face5588" containerName="collect-profiles" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.164312 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="79cb8390-3965-4d2a-a032-a528face5588" containerName="collect-profiles" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.164946 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.198582 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319481-pq7gv"] Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.270453 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wj9s5\" (UniqueName: \"kubernetes.io/projected/76943e31-a07e-46f1-865f-dcaa47257729-kube-api-access-wj9s5\") pod \"keystone-cron-29319481-pq7gv\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.270561 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-combined-ca-bundle\") pod \"keystone-cron-29319481-pq7gv\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.270643 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-fernet-keys\") pod \"keystone-cron-29319481-pq7gv\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.270694 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-config-data\") pod \"keystone-cron-29319481-pq7gv\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.372234 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-config-data\") pod \"keystone-cron-29319481-pq7gv\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.372566 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wj9s5\" (UniqueName: \"kubernetes.io/projected/76943e31-a07e-46f1-865f-dcaa47257729-kube-api-access-wj9s5\") pod \"keystone-cron-29319481-pq7gv\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.372772 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-combined-ca-bundle\") pod \"keystone-cron-29319481-pq7gv\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.372969 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-fernet-keys\") pod \"keystone-cron-29319481-pq7gv\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.378550 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-combined-ca-bundle\") pod \"keystone-cron-29319481-pq7gv\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.385984 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-config-data\") pod \"keystone-cron-29319481-pq7gv\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.388182 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-fernet-keys\") pod \"keystone-cron-29319481-pq7gv\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.393274 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wj9s5\" (UniqueName: \"kubernetes.io/projected/76943e31-a07e-46f1-865f-dcaa47257729-kube-api-access-wj9s5\") pod \"keystone-cron-29319481-pq7gv\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.497816 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:00 crc kubenswrapper[4592]: I0929 18:01:00.972387 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29319481-pq7gv"] Sep 29 18:01:01 crc kubenswrapper[4592]: I0929 18:01:01.143301 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319481-pq7gv" event={"ID":"76943e31-a07e-46f1-865f-dcaa47257729","Type":"ContainerStarted","Data":"c98ff12177c0343484e2a69ac8010284cda796b54a4391bab44a82bf58b45cc8"} Sep 29 18:01:02 crc kubenswrapper[4592]: I0929 18:01:02.152789 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319481-pq7gv" event={"ID":"76943e31-a07e-46f1-865f-dcaa47257729","Type":"ContainerStarted","Data":"6124f3b1c8ac201e48b1dcc13012dc04b59bd8d5df78624fd1dfc3582925374b"} Sep 29 18:01:02 crc kubenswrapper[4592]: I0929 18:01:02.171478 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29319481-pq7gv" podStartSLOduration=2.171455149 podStartE2EDuration="2.171455149s" podCreationTimestamp="2025-09-29 18:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:01:02.167264055 +0000 UTC m=+4192.315041746" watchObservedRunningTime="2025-09-29 18:01:02.171455149 +0000 UTC m=+4192.319232840" Sep 29 18:01:05 crc kubenswrapper[4592]: I0929 18:01:05.178712 4592 generic.go:334] "Generic (PLEG): container finished" podID="76943e31-a07e-46f1-865f-dcaa47257729" containerID="6124f3b1c8ac201e48b1dcc13012dc04b59bd8d5df78624fd1dfc3582925374b" exitCode=0 Sep 29 18:01:05 crc kubenswrapper[4592]: I0929 18:01:05.178776 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319481-pq7gv" event={"ID":"76943e31-a07e-46f1-865f-dcaa47257729","Type":"ContainerDied","Data":"6124f3b1c8ac201e48b1dcc13012dc04b59bd8d5df78624fd1dfc3582925374b"} Sep 29 18:01:06 crc kubenswrapper[4592]: 
I0929 18:01:06.543118 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:01:06 crc kubenswrapper[4592]: I0929 18:01:06.705600 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-fernet-keys\") pod \"76943e31-a07e-46f1-865f-dcaa47257729\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " Sep 29 18:01:06 crc kubenswrapper[4592]: I0929 18:01:06.705664 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wj9s5\" (UniqueName: \"kubernetes.io/projected/76943e31-a07e-46f1-865f-dcaa47257729-kube-api-access-wj9s5\") pod \"76943e31-a07e-46f1-865f-dcaa47257729\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " Sep 29 18:01:06 crc kubenswrapper[4592]: I0929 18:01:06.705831 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-config-data\") pod \"76943e31-a07e-46f1-865f-dcaa47257729\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " Sep 29 18:01:06 crc kubenswrapper[4592]: I0929 18:01:06.705920 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-combined-ca-bundle\") pod \"76943e31-a07e-46f1-865f-dcaa47257729\" (UID: \"76943e31-a07e-46f1-865f-dcaa47257729\") " Sep 29 18:01:06 crc kubenswrapper[4592]: I0929 18:01:06.712966 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76943e31-a07e-46f1-865f-dcaa47257729-kube-api-access-wj9s5" (OuterVolumeSpecName: "kube-api-access-wj9s5") pod "76943e31-a07e-46f1-865f-dcaa47257729" (UID: "76943e31-a07e-46f1-865f-dcaa47257729"). InnerVolumeSpecName "kube-api-access-wj9s5". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:01:06 crc kubenswrapper[4592]: I0929 18:01:06.714470 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "76943e31-a07e-46f1-865f-dcaa47257729" (UID: "76943e31-a07e-46f1-865f-dcaa47257729"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:01:06 crc kubenswrapper[4592]: I0929 18:01:06.740833 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76943e31-a07e-46f1-865f-dcaa47257729" (UID: "76943e31-a07e-46f1-865f-dcaa47257729"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:01:06 crc kubenswrapper[4592]: I0929 18:01:06.767554 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-config-data" (OuterVolumeSpecName: "config-data") pod "76943e31-a07e-46f1-865f-dcaa47257729" (UID: "76943e31-a07e-46f1-865f-dcaa47257729"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:01:06 crc kubenswrapper[4592]: I0929 18:01:06.808281 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 18:01:06 crc kubenswrapper[4592]: I0929 18:01:06.808322 4592 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Sep 29 18:01:06 crc kubenswrapper[4592]: I0929 18:01:06.808333 4592 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76943e31-a07e-46f1-865f-dcaa47257729-fernet-keys\") on node \"crc\" DevicePath \"\"" Sep 29 18:01:06 crc kubenswrapper[4592]: I0929 18:01:06.808342 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wj9s5\" (UniqueName: \"kubernetes.io/projected/76943e31-a07e-46f1-865f-dcaa47257729-kube-api-access-wj9s5\") on node \"crc\" DevicePath \"\"" Sep 29 18:01:07 crc kubenswrapper[4592]: I0929 18:01:07.195950 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29319481-pq7gv" event={"ID":"76943e31-a07e-46f1-865f-dcaa47257729","Type":"ContainerDied","Data":"c98ff12177c0343484e2a69ac8010284cda796b54a4391bab44a82bf58b45cc8"} Sep 29 18:01:07 crc kubenswrapper[4592]: I0929 18:01:07.195991 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c98ff12177c0343484e2a69ac8010284cda796b54a4391bab44a82bf58b45cc8" Sep 29 18:01:07 crc kubenswrapper[4592]: I0929 18:01:07.196039 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29319481-pq7gv" Sep 29 18:03:00 crc kubenswrapper[4592]: I0929 18:03:00.883385 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 18:03:00 crc kubenswrapper[4592]: I0929 18:03:00.883996 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 18:03:30 crc kubenswrapper[4592]: I0929 18:03:30.883573 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 18:03:30 crc kubenswrapper[4592]: I0929 18:03:30.884094 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 18:04:00 crc kubenswrapper[4592]: I0929 18:04:00.883635 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 18:04:00 crc kubenswrapper[4592]: I0929 18:04:00.884217 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 18:04:00 crc kubenswrapper[4592]: I0929 18:04:00.884278 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 18:04:00 crc kubenswrapper[4592]: I0929 18:04:00.885072 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c4fe199ac0cc63fa88e38c75c6ca4ca2e4911d1e135fbfe55830bf9433e7efac"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 18:04:00 crc kubenswrapper[4592]: I0929 18:04:00.885139 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://c4fe199ac0cc63fa88e38c75c6ca4ca2e4911d1e135fbfe55830bf9433e7efac" gracePeriod=600 Sep 29 18:04:01 crc kubenswrapper[4592]: I0929 18:04:01.935434 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="c4fe199ac0cc63fa88e38c75c6ca4ca2e4911d1e135fbfe55830bf9433e7efac" exitCode=0 Sep 29 18:04:01 crc kubenswrapper[4592]: I0929 18:04:01.935520 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"c4fe199ac0cc63fa88e38c75c6ca4ca2e4911d1e135fbfe55830bf9433e7efac"} Sep 29 18:04:01 crc kubenswrapper[4592]: I0929 18:04:01.937339 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362"} Sep 29 18:04:01 crc kubenswrapper[4592]: I0929 18:04:01.938281 4592 scope.go:117] "RemoveContainer" containerID="1ef752d8d5d9ca839120bf050535ffb2921901eed6d0eafa634f8e82d70206fc" Sep 29 18:05:04 crc kubenswrapper[4592]: I0929 18:05:04.529655 4592 generic.go:334] "Generic (PLEG): container finished" podID="1f57b8e4-0399-410d-a4ae-14451f3832f2" containerID="c64299989dae8a9bb6bc33c8aba8da844f6579c839f1d9047d3809be45c0ca25" exitCode=0 Sep 29 18:05:04 crc kubenswrapper[4592]: I0929 18:05:04.529735 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"1f57b8e4-0399-410d-a4ae-14451f3832f2","Type":"ContainerDied","Data":"c64299989dae8a9bb6bc33c8aba8da844f6579c839f1d9047d3809be45c0ca25"} Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.477347 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.553369 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"1f57b8e4-0399-410d-a4ae-14451f3832f2","Type":"ContainerDied","Data":"b0859cfcc904e418b0343d358ce0eb59914f1a1a8dc909b5cc5025e88da6e745"} Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.553403 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0859cfcc904e418b0343d358ce0eb59914f1a1a8dc909b5cc5025e88da6e745" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.553430 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.575609 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-openstack-config-secret\") pod \"1f57b8e4-0399-410d-a4ae-14451f3832f2\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.575690 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"1f57b8e4-0399-410d-a4ae-14451f3832f2\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.575724 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/1f57b8e4-0399-410d-a4ae-14451f3832f2-test-operator-ephemeral-temporary\") pod \"1f57b8e4-0399-410d-a4ae-14451f3832f2\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.575759 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/1f57b8e4-0399-410d-a4ae-14451f3832f2-test-operator-ephemeral-workdir\") pod \"1f57b8e4-0399-410d-a4ae-14451f3832f2\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.575814 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-ssh-key\") pod \"1f57b8e4-0399-410d-a4ae-14451f3832f2\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.575882 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7v7fx\" (UniqueName: \"kubernetes.io/projected/1f57b8e4-0399-410d-a4ae-14451f3832f2-kube-api-access-7v7fx\") pod \"1f57b8e4-0399-410d-a4ae-14451f3832f2\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.576018 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1f57b8e4-0399-410d-a4ae-14451f3832f2-openstack-config\") pod \"1f57b8e4-0399-410d-a4ae-14451f3832f2\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.576058 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-ca-certs\") 
pod \"1f57b8e4-0399-410d-a4ae-14451f3832f2\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.576081 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f57b8e4-0399-410d-a4ae-14451f3832f2-config-data\") pod \"1f57b8e4-0399-410d-a4ae-14451f3832f2\" (UID: \"1f57b8e4-0399-410d-a4ae-14451f3832f2\") " Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.577723 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f57b8e4-0399-410d-a4ae-14451f3832f2-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "1f57b8e4-0399-410d-a4ae-14451f3832f2" (UID: "1f57b8e4-0399-410d-a4ae-14451f3832f2"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.583279 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "test-operator-logs") pod "1f57b8e4-0399-410d-a4ae-14451f3832f2" (UID: "1f57b8e4-0399-410d-a4ae-14451f3832f2"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.583528 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f57b8e4-0399-410d-a4ae-14451f3832f2-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "1f57b8e4-0399-410d-a4ae-14451f3832f2" (UID: "1f57b8e4-0399-410d-a4ae-14451f3832f2"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.583799 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f57b8e4-0399-410d-a4ae-14451f3832f2-config-data" (OuterVolumeSpecName: "config-data") pod "1f57b8e4-0399-410d-a4ae-14451f3832f2" (UID: "1f57b8e4-0399-410d-a4ae-14451f3832f2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.583973 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f57b8e4-0399-410d-a4ae-14451f3832f2-kube-api-access-7v7fx" (OuterVolumeSpecName: "kube-api-access-7v7fx") pod "1f57b8e4-0399-410d-a4ae-14451f3832f2" (UID: "1f57b8e4-0399-410d-a4ae-14451f3832f2"). InnerVolumeSpecName "kube-api-access-7v7fx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.609407 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "1f57b8e4-0399-410d-a4ae-14451f3832f2" (UID: "1f57b8e4-0399-410d-a4ae-14451f3832f2"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.619752 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "1f57b8e4-0399-410d-a4ae-14451f3832f2" (UID: "1f57b8e4-0399-410d-a4ae-14451f3832f2"). 
InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.620806 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1f57b8e4-0399-410d-a4ae-14451f3832f2" (UID: "1f57b8e4-0399-410d-a4ae-14451f3832f2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.642019 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f57b8e4-0399-410d-a4ae-14451f3832f2-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "1f57b8e4-0399-410d-a4ae-14451f3832f2" (UID: "1f57b8e4-0399-410d-a4ae-14451f3832f2"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.678735 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7v7fx\" (UniqueName: \"kubernetes.io/projected/1f57b8e4-0399-410d-a4ae-14451f3832f2-kube-api-access-7v7fx\") on node \"crc\" DevicePath \"\"" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.678784 4592 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1f57b8e4-0399-410d-a4ae-14451f3832f2-openstack-config\") on node \"crc\" DevicePath \"\"" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.678795 4592 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-ca-certs\") on node \"crc\" DevicePath \"\"" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.678804 4592 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1f57b8e4-0399-410d-a4ae-14451f3832f2-config-data\") on node \"crc\" DevicePath \"\"" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.678894 4592 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.680211 4592 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.680233 4592 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/1f57b8e4-0399-410d-a4ae-14451f3832f2-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.680243 4592 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/1f57b8e4-0399-410d-a4ae-14451f3832f2-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.680254 4592 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1f57b8e4-0399-410d-a4ae-14451f3832f2-ssh-key\") on node \"crc\" DevicePath \"\"" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.703317 4592 operation_generator.go:917] UnmountDevice succeeded for volume 
"local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Sep 29 18:05:06 crc kubenswrapper[4592]: I0929 18:05:06.781353 4592 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.283443 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 18:05:11 crc kubenswrapper[4592]: E0929 18:05:11.284455 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f57b8e4-0399-410d-a4ae-14451f3832f2" containerName="tempest-tests-tempest-tests-runner" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.284472 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f57b8e4-0399-410d-a4ae-14451f3832f2" containerName="tempest-tests-tempest-tests-runner" Sep 29 18:05:11 crc kubenswrapper[4592]: E0929 18:05:11.284511 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76943e31-a07e-46f1-865f-dcaa47257729" containerName="keystone-cron" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.284519 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="76943e31-a07e-46f1-865f-dcaa47257729" containerName="keystone-cron" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.284741 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f57b8e4-0399-410d-a4ae-14451f3832f2" containerName="tempest-tests-tempest-tests-runner" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.284758 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="76943e31-a07e-46f1-865f-dcaa47257729" containerName="keystone-cron" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.285528 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.293207 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.295745 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-7lmcp" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.370419 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkwv9\" (UniqueName: \"kubernetes.io/projected/e85e8316-b254-45bb-b405-ac12c75f9433-kube-api-access-bkwv9\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e85e8316-b254-45bb-b405-ac12c75f9433\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.370531 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e85e8316-b254-45bb-b405-ac12c75f9433\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.472594 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e85e8316-b254-45bb-b405-ac12c75f9433\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.472863 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkwv9\" (UniqueName: \"kubernetes.io/projected/e85e8316-b254-45bb-b405-ac12c75f9433-kube-api-access-bkwv9\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e85e8316-b254-45bb-b405-ac12c75f9433\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.475953 4592 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e85e8316-b254-45bb-b405-ac12c75f9433\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.509788 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkwv9\" (UniqueName: \"kubernetes.io/projected/e85e8316-b254-45bb-b405-ac12c75f9433-kube-api-access-bkwv9\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e85e8316-b254-45bb-b405-ac12c75f9433\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.524205 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"e85e8316-b254-45bb-b405-ac12c75f9433\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Sep 29 18:05:11 crc 
Sep 29 18:05:11 crc kubenswrapper[4592]: I0929 18:05:11.640616 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Sep 29 18:05:12 crc kubenswrapper[4592]: I0929 18:05:12.113922 4592 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 18:05:12 crc kubenswrapper[4592]: I0929 18:05:12.120605 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Sep 29 18:05:12 crc kubenswrapper[4592]: I0929 18:05:12.621569 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"e85e8316-b254-45bb-b405-ac12c75f9433","Type":"ContainerStarted","Data":"9c7a077a13dcf889358450cddf7124c994356d7febb915ee3531a7c3338d0fc3"}
Sep 29 18:05:13 crc kubenswrapper[4592]: I0929 18:05:13.650450 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"e85e8316-b254-45bb-b405-ac12c75f9433","Type":"ContainerStarted","Data":"dbb1f0b5c7a063a717d8589bba9fa31d84ef78f181d597698689c96b495381e7"}
Sep 29 18:05:13 crc kubenswrapper[4592]: I0929 18:05:13.672547 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.6580439089999999 podStartE2EDuration="2.67251869s" podCreationTimestamp="2025-09-29 18:05:11 +0000 UTC" firstStartedPulling="2025-09-29 18:05:12.113663425 +0000 UTC m=+4442.261441116" lastFinishedPulling="2025-09-29 18:05:13.128138216 +0000 UTC m=+4443.275915897" observedRunningTime="2025-09-29 18:05:13.669844577 +0000 UTC m=+4443.817622288" watchObservedRunningTime="2025-09-29 18:05:13.67251869 +0000 UTC m=+4443.820296411"
Sep 29 18:05:31 crc kubenswrapper[4592]: I0929 18:05:31.735908 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-c56gt/must-gather-gc2nb"]
Sep 29 18:05:31 crc kubenswrapper[4592]: I0929 18:05:31.737819 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-c56gt/must-gather-gc2nb" Sep 29 18:05:31 crc kubenswrapper[4592]: I0929 18:05:31.742442 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-c56gt"/"default-dockercfg-frnvk" Sep 29 18:05:31 crc kubenswrapper[4592]: I0929 18:05:31.743379 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-c56gt"/"kube-root-ca.crt" Sep 29 18:05:31 crc kubenswrapper[4592]: I0929 18:05:31.745766 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-c56gt"/"openshift-service-ca.crt" Sep 29 18:05:31 crc kubenswrapper[4592]: I0929 18:05:31.747108 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-c56gt/must-gather-gc2nb"] Sep 29 18:05:31 crc kubenswrapper[4592]: I0929 18:05:31.873193 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/19733981-aad9-4fa7-8bc0-7e6255fe82b3-must-gather-output\") pod \"must-gather-gc2nb\" (UID: \"19733981-aad9-4fa7-8bc0-7e6255fe82b3\") " pod="openshift-must-gather-c56gt/must-gather-gc2nb" Sep 29 18:05:31 crc kubenswrapper[4592]: I0929 18:05:31.873553 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j76wb\" (UniqueName: \"kubernetes.io/projected/19733981-aad9-4fa7-8bc0-7e6255fe82b3-kube-api-access-j76wb\") pod \"must-gather-gc2nb\" (UID: \"19733981-aad9-4fa7-8bc0-7e6255fe82b3\") " pod="openshift-must-gather-c56gt/must-gather-gc2nb" Sep 29 18:05:31 crc kubenswrapper[4592]: I0929 18:05:31.976039 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/19733981-aad9-4fa7-8bc0-7e6255fe82b3-must-gather-output\") pod \"must-gather-gc2nb\" (UID: \"19733981-aad9-4fa7-8bc0-7e6255fe82b3\") " pod="openshift-must-gather-c56gt/must-gather-gc2nb" Sep 29 18:05:31 crc kubenswrapper[4592]: I0929 18:05:31.976122 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j76wb\" (UniqueName: \"kubernetes.io/projected/19733981-aad9-4fa7-8bc0-7e6255fe82b3-kube-api-access-j76wb\") pod \"must-gather-gc2nb\" (UID: \"19733981-aad9-4fa7-8bc0-7e6255fe82b3\") " pod="openshift-must-gather-c56gt/must-gather-gc2nb" Sep 29 18:05:31 crc kubenswrapper[4592]: I0929 18:05:31.977011 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/19733981-aad9-4fa7-8bc0-7e6255fe82b3-must-gather-output\") pod \"must-gather-gc2nb\" (UID: \"19733981-aad9-4fa7-8bc0-7e6255fe82b3\") " pod="openshift-must-gather-c56gt/must-gather-gc2nb" Sep 29 18:05:31 crc kubenswrapper[4592]: I0929 18:05:31.993026 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j76wb\" (UniqueName: \"kubernetes.io/projected/19733981-aad9-4fa7-8bc0-7e6255fe82b3-kube-api-access-j76wb\") pod \"must-gather-gc2nb\" (UID: \"19733981-aad9-4fa7-8bc0-7e6255fe82b3\") " pod="openshift-must-gather-c56gt/must-gather-gc2nb" Sep 29 18:05:32 crc kubenswrapper[4592]: I0929 18:05:32.066122 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-c56gt/must-gather-gc2nb" Sep 29 18:05:32 crc kubenswrapper[4592]: I0929 18:05:32.568822 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-c56gt/must-gather-gc2nb"] Sep 29 18:05:32 crc kubenswrapper[4592]: I0929 18:05:32.914033 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/must-gather-gc2nb" event={"ID":"19733981-aad9-4fa7-8bc0-7e6255fe82b3","Type":"ContainerStarted","Data":"abfc02447e943b84100879da1adbe26641d4f3550062cf6c57d07d3a47119e03"} Sep 29 18:05:37 crc kubenswrapper[4592]: I0929 18:05:37.966442 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/must-gather-gc2nb" event={"ID":"19733981-aad9-4fa7-8bc0-7e6255fe82b3","Type":"ContainerStarted","Data":"56417f48da327c6c13a04301662db8cd1ef6fd9c92bbd82830d1a35bfab1f2e9"} Sep 29 18:05:37 crc kubenswrapper[4592]: I0929 18:05:37.966886 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/must-gather-gc2nb" event={"ID":"19733981-aad9-4fa7-8bc0-7e6255fe82b3","Type":"ContainerStarted","Data":"0b22f948a632460f81e62014727517adac7ddcd8d94675b8488bbc04ab1d5d1f"} Sep 29 18:05:37 crc kubenswrapper[4592]: I0929 18:05:37.987942 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-c56gt/must-gather-gc2nb" podStartSLOduration=2.840548337 podStartE2EDuration="6.987920563s" podCreationTimestamp="2025-09-29 18:05:31 +0000 UTC" firstStartedPulling="2025-09-29 18:05:32.587375883 +0000 UTC m=+4462.735153564" lastFinishedPulling="2025-09-29 18:05:36.734748109 +0000 UTC m=+4466.882525790" observedRunningTime="2025-09-29 18:05:37.978286671 +0000 UTC m=+4468.126064362" watchObservedRunningTime="2025-09-29 18:05:37.987920563 +0000 UTC m=+4468.135698254" Sep 29 18:05:43 crc kubenswrapper[4592]: I0929 18:05:43.057761 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-c56gt/crc-debug-w5rqw"] Sep 29 18:05:43 crc kubenswrapper[4592]: I0929 18:05:43.061734 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-c56gt/crc-debug-w5rqw" Sep 29 18:05:43 crc kubenswrapper[4592]: I0929 18:05:43.088732 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fa4f835-d593-46d4-a7c4-5569cbf29d8e-host\") pod \"crc-debug-w5rqw\" (UID: \"3fa4f835-d593-46d4-a7c4-5569cbf29d8e\") " pod="openshift-must-gather-c56gt/crc-debug-w5rqw" Sep 29 18:05:43 crc kubenswrapper[4592]: I0929 18:05:43.089001 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcmjz\" (UniqueName: \"kubernetes.io/projected/3fa4f835-d593-46d4-a7c4-5569cbf29d8e-kube-api-access-fcmjz\") pod \"crc-debug-w5rqw\" (UID: \"3fa4f835-d593-46d4-a7c4-5569cbf29d8e\") " pod="openshift-must-gather-c56gt/crc-debug-w5rqw" Sep 29 18:05:43 crc kubenswrapper[4592]: I0929 18:05:43.197453 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fa4f835-d593-46d4-a7c4-5569cbf29d8e-host\") pod \"crc-debug-w5rqw\" (UID: \"3fa4f835-d593-46d4-a7c4-5569cbf29d8e\") " pod="openshift-must-gather-c56gt/crc-debug-w5rqw" Sep 29 18:05:43 crc kubenswrapper[4592]: I0929 18:05:43.197541 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcmjz\" (UniqueName: \"kubernetes.io/projected/3fa4f835-d593-46d4-a7c4-5569cbf29d8e-kube-api-access-fcmjz\") pod \"crc-debug-w5rqw\" (UID: \"3fa4f835-d593-46d4-a7c4-5569cbf29d8e\") " pod="openshift-must-gather-c56gt/crc-debug-w5rqw" Sep 29 18:05:43 crc kubenswrapper[4592]: I0929 18:05:43.198503 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fa4f835-d593-46d4-a7c4-5569cbf29d8e-host\") pod \"crc-debug-w5rqw\" (UID: \"3fa4f835-d593-46d4-a7c4-5569cbf29d8e\") " pod="openshift-must-gather-c56gt/crc-debug-w5rqw" Sep 29 18:05:43 crc kubenswrapper[4592]: I0929 18:05:43.235199 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcmjz\" (UniqueName: \"kubernetes.io/projected/3fa4f835-d593-46d4-a7c4-5569cbf29d8e-kube-api-access-fcmjz\") pod \"crc-debug-w5rqw\" (UID: \"3fa4f835-d593-46d4-a7c4-5569cbf29d8e\") " pod="openshift-must-gather-c56gt/crc-debug-w5rqw" Sep 29 18:05:43 crc kubenswrapper[4592]: I0929 18:05:43.382602 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-c56gt/crc-debug-w5rqw" Sep 29 18:05:44 crc kubenswrapper[4592]: I0929 18:05:44.022169 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/crc-debug-w5rqw" event={"ID":"3fa4f835-d593-46d4-a7c4-5569cbf29d8e","Type":"ContainerStarted","Data":"b9f7b821f612e79550ba949faf9245682b1a6c3ca42aabbf68dec74acebe3b8b"} Sep 29 18:05:55 crc kubenswrapper[4592]: I0929 18:05:55.136129 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/crc-debug-w5rqw" event={"ID":"3fa4f835-d593-46d4-a7c4-5569cbf29d8e","Type":"ContainerStarted","Data":"85d18ffe56f2c937e82d0186556ae8d2c638e2036a7b8b97452ae5caf4cae13a"} Sep 29 18:05:55 crc kubenswrapper[4592]: I0929 18:05:55.157359 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-c56gt/crc-debug-w5rqw" podStartSLOduration=1.291203796 podStartE2EDuration="12.157338239s" podCreationTimestamp="2025-09-29 18:05:43 +0000 UTC" firstStartedPulling="2025-09-29 18:05:43.42938658 +0000 UTC m=+4473.577164261" lastFinishedPulling="2025-09-29 18:05:54.295521023 +0000 UTC m=+4484.443298704" observedRunningTime="2025-09-29 18:05:55.154863512 +0000 UTC m=+4485.302641233" watchObservedRunningTime="2025-09-29 18:05:55.157338239 +0000 UTC m=+4485.305115950" Sep 29 18:06:30 crc kubenswrapper[4592]: I0929 18:06:30.884054 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 18:06:30 crc kubenswrapper[4592]: I0929 18:06:30.884738 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 18:07:00 crc kubenswrapper[4592]: I0929 18:07:00.883501 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 18:07:00 crc kubenswrapper[4592]: I0929 18:07:00.884139 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 18:07:14 crc kubenswrapper[4592]: I0929 18:07:14.603029 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5b969b6866-rjf85_16bb91be-d91d-476e-a81d-44ef92c11718/barbican-api-log/0.log" Sep 29 18:07:14 crc kubenswrapper[4592]: I0929 18:07:14.612490 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5b969b6866-rjf85_16bb91be-d91d-476e-a81d-44ef92c11718/barbican-api/0.log" Sep 29 18:07:14 crc kubenswrapper[4592]: I0929 18:07:14.925500 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6fdf4f774d-zgzql_cd181b6d-4f45-415c-8038-4bf077b0a747/barbican-keystone-listener/0.log" Sep 29 
18:07:14 crc kubenswrapper[4592]: I0929 18:07:14.942807 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6fdf4f774d-zgzql_cd181b6d-4f45-415c-8038-4bf077b0a747/barbican-keystone-listener-log/0.log"
Sep 29 18:07:15 crc kubenswrapper[4592]: I0929 18:07:15.186903 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-78c8db585f-2qfs6_bda783a8-49d1-48be-9b21-695b1a673b1a/barbican-worker-log/0.log"
Sep 29 18:07:15 crc kubenswrapper[4592]: I0929 18:07:15.203044 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-78c8db585f-2qfs6_bda783a8-49d1-48be-9b21-695b1a673b1a/barbican-worker/0.log"
Sep 29 18:07:15 crc kubenswrapper[4592]: I0929 18:07:15.470894 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd_5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 18:07:15 crc kubenswrapper[4592]: I0929 18:07:15.771230 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ff89d1c7-4a66-4e00-b04d-24e917c56e11/ceilometer-notification-agent/0.log"
Sep 29 18:07:16 crc kubenswrapper[4592]: I0929 18:07:16.062671 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ff89d1c7-4a66-4e00-b04d-24e917c56e11/proxy-httpd/0.log"
Sep 29 18:07:16 crc kubenswrapper[4592]: I0929 18:07:16.097109 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ff89d1c7-4a66-4e00-b04d-24e917c56e11/ceilometer-central-agent/0.log"
Sep 29 18:07:16 crc kubenswrapper[4592]: I0929 18:07:16.322932 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ff89d1c7-4a66-4e00-b04d-24e917c56e11/sg-core/0.log"
Sep 29 18:07:16 crc kubenswrapper[4592]: I0929 18:07:16.537196 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_aa1f23ba-8aae-4a33-8946-7cfcd7087e6e/cinder-api/0.log"
Sep 29 18:07:16 crc kubenswrapper[4592]: I0929 18:07:16.696518 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_aa1f23ba-8aae-4a33-8946-7cfcd7087e6e/cinder-api-log/0.log"
Sep 29 18:07:16 crc kubenswrapper[4592]: I0929 18:07:16.789129 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c/cinder-scheduler/0.log"
Sep 29 18:07:16 crc kubenswrapper[4592]: I0929 18:07:16.930966 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c/probe/0.log"
Sep 29 18:07:17 crc kubenswrapper[4592]: I0929 18:07:17.075081 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t_5f2c16e8-c860-42a9-9888-63e22d9d57b2/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 18:07:17 crc kubenswrapper[4592]: I0929 18:07:17.209683 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq_33421f74-e3cd-4318-b751-ed324d225253/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 18:07:17 crc kubenswrapper[4592]: I0929 18:07:17.869139 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-vzqb2_c242d2c5-5bda-4cd3-9324-7fd5d7403646/init/0.log"
Sep 29 18:07:18 crc kubenswrapper[4592]: I0929 18:07:18.101792 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-vzqb2_c242d2c5-5bda-4cd3-9324-7fd5d7403646/init/0.log"
Sep 29 18:07:18 crc kubenswrapper[4592]: I0929 18:07:18.314038 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-vzqb2_c242d2c5-5bda-4cd3-9324-7fd5d7403646/dnsmasq-dns/0.log"
Sep 29 18:07:18 crc kubenswrapper[4592]: I0929 18:07:18.406415 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s_b0275d99-00b1-4174-ab01-598af7ed19b7/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 18:07:18 crc kubenswrapper[4592]: I0929 18:07:18.567812 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_f396e95c-bf51-4e4d-9dc7-76188423316b/glance-httpd/0.log"
Sep 29 18:07:18 crc kubenswrapper[4592]: I0929 18:07:18.639824 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_f396e95c-bf51-4e4d-9dc7-76188423316b/glance-log/0.log"
Sep 29 18:07:18 crc kubenswrapper[4592]: I0929 18:07:18.852585 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_e42af79d-fc77-4451-8550-cbd866e1eabe/glance-httpd/0.log"
Sep 29 18:07:18 crc kubenswrapper[4592]: I0929 18:07:18.931253 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_e42af79d-fc77-4451-8550-cbd866e1eabe/glance-log/0.log"
Sep 29 18:07:19 crc kubenswrapper[4592]: I0929 18:07:19.221525 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-749bb4c784-lnncs_2d536771-b1ae-4daf-a9f1-1a86e2af88e8/horizon/2.log"
Sep 29 18:07:19 crc kubenswrapper[4592]: I0929 18:07:19.368023 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-749bb4c784-lnncs_2d536771-b1ae-4daf-a9f1-1a86e2af88e8/horizon/1.log"
Sep 29 18:07:19 crc kubenswrapper[4592]: I0929 18:07:19.579569 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-749bb4c784-lnncs_2d536771-b1ae-4daf-a9f1-1a86e2af88e8/horizon-log/0.log"
Sep 29 18:07:19 crc kubenswrapper[4592]: I0929 18:07:19.790416 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p_3b5035f5-4d62-4661-8067-869b1e54997e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 18:07:20 crc kubenswrapper[4592]: I0929 18:07:20.036741 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-79dcn_8896fbe8-6b4f-41d8-a85c-88ea182d4cf6/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 18:07:20 crc kubenswrapper[4592]: I0929 18:07:20.378183 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319481-pq7gv_76943e31-a07e-46f1-865f-dcaa47257729/keystone-cron/0.log"
Sep 29 18:07:20 crc kubenswrapper[4592]: I0929 18:07:20.440522 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-584d46f4c7-tdlrl_728e33a1-191b-4c9d-a2d2-e569433182ea/keystone-api/0.log"
Sep 29 18:07:20 crc kubenswrapper[4592]: I0929 18:07:20.513568 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_1e655a7a-19bc-4d0d-ab87-2c906903d7c8/kube-state-metrics/0.log"
Sep 29 18:07:20 crc kubenswrapper[4592]: I0929 18:07:20.610869 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6_40c378c3-0f92-474d-aaed-f3cd105e4714/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6_40c378c3-0f92-474d-aaed-f3cd105e4714/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:07:21 crc kubenswrapper[4592]: I0929 18:07:21.295322 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5588c77f49-qmt48_177b2eb7-9986-4985-bd07-1b5a5d86f678/neutron-httpd/0.log" Sep 29 18:07:21 crc kubenswrapper[4592]: I0929 18:07:21.403133 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh_29930a89-f89e-4db7-85e6-4f47c1033098/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:07:21 crc kubenswrapper[4592]: I0929 18:07:21.427591 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5588c77f49-qmt48_177b2eb7-9986-4985-bd07-1b5a5d86f678/neutron-api/0.log" Sep 29 18:07:21 crc kubenswrapper[4592]: I0929 18:07:21.707292 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_1ed52369-92ab-4da4-a517-1555c79b0a38/memcached/0.log" Sep 29 18:07:22 crc kubenswrapper[4592]: I0929 18:07:22.242675 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_ecda42b4-525c-464f-ab13-394434750d4a/nova-cell0-conductor-conductor/0.log" Sep 29 18:07:22 crc kubenswrapper[4592]: I0929 18:07:22.625251 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_bb359aaf-6eae-40d2-a14e-3a7a47e3a286/nova-cell1-conductor-conductor/0.log" Sep 29 18:07:22 crc kubenswrapper[4592]: I0929 18:07:22.714131 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_96bba62b-2b30-4b47-af6b-5bf6e32275a1/nova-api-log/0.log" Sep 29 18:07:22 crc kubenswrapper[4592]: I0929 18:07:22.864597 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_a998c4c4-de7e-4c25-b2c3-87d54e3b9e56/nova-cell1-novncproxy-novncproxy/0.log" Sep 29 18:07:22 crc kubenswrapper[4592]: I0929 18:07:22.928649 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_96bba62b-2b30-4b47-af6b-5bf6e32275a1/nova-api-api/0.log" Sep 29 18:07:23 crc kubenswrapper[4592]: I0929 18:07:23.052468 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-rrsnr_e5d09077-a84b-4b69-974b-5286b27f244f/nova-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:07:23 crc kubenswrapper[4592]: I0929 18:07:23.284050 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_483fd1ac-005e-4d6f-8d1d-03a192a3b366/nova-metadata-log/0.log" Sep 29 18:07:23 crc kubenswrapper[4592]: I0929 18:07:23.965674 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_59ac4af6-5ade-49f1-8098-52e823dcf61f/mysql-bootstrap/0.log" Sep 29 18:07:24 crc kubenswrapper[4592]: I0929 18:07:24.249050 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_d200c10f-b6bd-4908-b79e-7ab4ae10587d/nova-scheduler-scheduler/0.log" Sep 29 18:07:24 crc kubenswrapper[4592]: I0929 18:07:24.294873 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_59ac4af6-5ade-49f1-8098-52e823dcf61f/mysql-bootstrap/0.log" Sep 29 18:07:24 crc kubenswrapper[4592]: I0929 18:07:24.317610 4592 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-cell1-galera-0_59ac4af6-5ade-49f1-8098-52e823dcf61f/galera/0.log" Sep 29 18:07:24 crc kubenswrapper[4592]: I0929 18:07:24.423232 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_483fd1ac-005e-4d6f-8d1d-03a192a3b366/nova-metadata-metadata/0.log" Sep 29 18:07:24 crc kubenswrapper[4592]: I0929 18:07:24.919509 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4c85a81f-2e67-4a6f-928b-d4735005cd43/mysql-bootstrap/0.log" Sep 29 18:07:25 crc kubenswrapper[4592]: I0929 18:07:25.191439 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4c85a81f-2e67-4a6f-928b-d4735005cd43/mysql-bootstrap/0.log" Sep 29 18:07:25 crc kubenswrapper[4592]: I0929 18:07:25.220032 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4c85a81f-2e67-4a6f-928b-d4735005cd43/galera/0.log" Sep 29 18:07:25 crc kubenswrapper[4592]: I0929 18:07:25.247435 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_d6e91b2c-f8ba-4654-8431-a50545a2c37b/openstackclient/0.log" Sep 29 18:07:25 crc kubenswrapper[4592]: I0929 18:07:25.506453 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jfzwf_d813cc31-c8ba-48c0-b523-3d2b3fbc3341/ovn-controller/0.log" Sep 29 18:07:25 crc kubenswrapper[4592]: I0929 18:07:25.583877 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-8xpsr_7132c9e8-ff15-414a-b384-4a266f3c84f8/openstack-network-exporter/0.log" Sep 29 18:07:25 crc kubenswrapper[4592]: I0929 18:07:25.753913 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7x4wp_fd373ead-845f-4c4d-b9d7-38f8424697d5/ovsdb-server-init/0.log" Sep 29 18:07:26 crc kubenswrapper[4592]: I0929 18:07:26.268084 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7x4wp_fd373ead-845f-4c4d-b9d7-38f8424697d5/ovsdb-server/0.log" Sep 29 18:07:26 crc kubenswrapper[4592]: I0929 18:07:26.316263 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7x4wp_fd373ead-845f-4c4d-b9d7-38f8424697d5/ovsdb-server-init/0.log" Sep 29 18:07:26 crc kubenswrapper[4592]: I0929 18:07:26.327087 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7x4wp_fd373ead-845f-4c4d-b9d7-38f8424697d5/ovs-vswitchd/0.log" Sep 29 18:07:26 crc kubenswrapper[4592]: I0929 18:07:26.469745 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qdjjh_121a0489-01a2-492b-a564-2718b687e621/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:07:26 crc kubenswrapper[4592]: I0929 18:07:26.552875 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8bd365a0-dba3-4f81-a229-a344e01a6eca/ovn-northd/0.log" Sep 29 18:07:26 crc kubenswrapper[4592]: I0929 18:07:26.566082 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8bd365a0-dba3-4f81-a229-a344e01a6eca/openstack-network-exporter/0.log" Sep 29 18:07:26 crc kubenswrapper[4592]: I0929 18:07:26.808641 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b9e2d243-07ca-4b99-a929-9ae3321c3274/openstack-network-exporter/0.log" Sep 29 18:07:26 crc kubenswrapper[4592]: I0929 18:07:26.818443 4592 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_b9e2d243-07ca-4b99-a929-9ae3321c3274/ovsdbserver-nb/0.log" Sep 29 18:07:27 crc kubenswrapper[4592]: I0929 18:07:27.038035 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1549c716-ca22-42ff-9cea-e63e50856936/openstack-network-exporter/0.log" Sep 29 18:07:27 crc kubenswrapper[4592]: I0929 18:07:27.115568 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1549c716-ca22-42ff-9cea-e63e50856936/ovsdbserver-sb/0.log" Sep 29 18:07:27 crc kubenswrapper[4592]: I0929 18:07:27.180306 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6cbb8cd48-47ckj_f53c28fe-50d3-49b2-926e-fe4f166838ce/placement-api/0.log" Sep 29 18:07:27 crc kubenswrapper[4592]: I0929 18:07:27.381430 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9ae18931-f35a-4836-a054-06519e81aca0/setup-container/0.log" Sep 29 18:07:27 crc kubenswrapper[4592]: I0929 18:07:27.460806 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6cbb8cd48-47ckj_f53c28fe-50d3-49b2-926e-fe4f166838ce/placement-log/0.log" Sep 29 18:07:27 crc kubenswrapper[4592]: I0929 18:07:27.982410 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9ae18931-f35a-4836-a054-06519e81aca0/rabbitmq/0.log" Sep 29 18:07:27 crc kubenswrapper[4592]: I0929 18:07:27.986518 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_b1c359e8-5df5-4ef2-97ed-a3753c1a681d/setup-container/0.log" Sep 29 18:07:28 crc kubenswrapper[4592]: I0929 18:07:28.002646 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9ae18931-f35a-4836-a054-06519e81aca0/setup-container/0.log" Sep 29 18:07:28 crc kubenswrapper[4592]: I0929 18:07:28.171363 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_b1c359e8-5df5-4ef2-97ed-a3753c1a681d/setup-container/0.log" Sep 29 18:07:28 crc kubenswrapper[4592]: I0929 18:07:28.240934 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_b1c359e8-5df5-4ef2-97ed-a3753c1a681d/rabbitmq/0.log" Sep 29 18:07:28 crc kubenswrapper[4592]: I0929 18:07:28.394227 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9_fffd0339-970b-41b0-b868-de31bfdc29b0/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:07:28 crc kubenswrapper[4592]: I0929 18:07:28.466107 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-gqflz_894df7ab-ced1-483a-98a5-2e7e496f1578/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:07:28 crc kubenswrapper[4592]: I0929 18:07:28.634287 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm_abd885d0-dbac-4845-8a3e-2454abf4d652/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:07:28 crc kubenswrapper[4592]: I0929 18:07:28.730929 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-gvrb2_6a007423-0554-48b3-b38a-d23f2509aacd/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:07:28 crc kubenswrapper[4592]: I0929 18:07:28.896776 4592 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-2nfwh_af7efc5b-eb67-4660-92ae-77d6efa85b0f/ssh-known-hosts-edpm-deployment/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.087350 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-74d4767f8f-mgqs7_f1171449-a884-43cb-b254-c2ee282ea3a0/proxy-httpd/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.147426 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-74d4767f8f-mgqs7_f1171449-a884-43cb-b254-c2ee282ea3a0/proxy-server/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.232474 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-kk56p_b87c021d-8ea5-4e65-9a34-68e38d02b6c3/swift-ring-rebalance/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.358060 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/account-auditor/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.461171 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/account-reaper/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.479080 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/account-replicator/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.564826 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/account-server/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.584095 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/container-auditor/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.710803 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/container-replicator/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.716289 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/container-server/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.807520 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/object-auditor/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.893258 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/container-updater/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.980763 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/object-expirer/0.log" Sep 29 18:07:29 crc kubenswrapper[4592]: I0929 18:07:29.990408 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/object-replicator/0.log" Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.024995 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/object-server/0.log" Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.136049 4592 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/object-updater/0.log" Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.249756 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/rsync/0.log" Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.309699 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/swift-recon-cron/0.log" Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.488603 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h_d76cbdef-0253-4fd5-abc2-bec6b0b6df81/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.580703 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_1f57b8e4-0399-410d-a4ae-14451f3832f2/tempest-tests-tempest-tests-runner/0.log" Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.720642 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_e85e8316-b254-45bb-b405-ac12c75f9433/test-operator-logs-container/0.log" Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.784577 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9_0641d7e4-c868-48bd-948d-186401c6f3c7/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.883317 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.883383 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.883430 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.884247 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 18:07:30 crc kubenswrapper[4592]: I0929 18:07:30.884317 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" gracePeriod=600 Sep 29 18:07:31 crc kubenswrapper[4592]: E0929 18:07:31.007209 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:07:32 crc kubenswrapper[4592]: I0929 18:07:32.009174 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362"} Sep 29 18:07:32 crc kubenswrapper[4592]: I0929 18:07:32.009180 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" exitCode=0 Sep 29 18:07:32 crc kubenswrapper[4592]: I0929 18:07:32.009431 4592 scope.go:117] "RemoveContainer" containerID="c4fe199ac0cc63fa88e38c75c6ca4ca2e4911d1e135fbfe55830bf9433e7efac" Sep 29 18:07:32 crc kubenswrapper[4592]: I0929 18:07:32.009953 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:07:32 crc kubenswrapper[4592]: E0929 18:07:32.010244 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:07:44 crc kubenswrapper[4592]: I0929 18:07:44.183514 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:07:44 crc kubenswrapper[4592]: E0929 18:07:44.184261 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.215831 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-snfp8"] Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.219327 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-snfp8"] Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.219472 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.369792 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph4wm\" (UniqueName: \"kubernetes.io/projected/ffb4c43d-964b-460c-9be3-c4c55416760e-kube-api-access-ph4wm\") pod \"community-operators-snfp8\" (UID: \"ffb4c43d-964b-460c-9be3-c4c55416760e\") " pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.370267 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb4c43d-964b-460c-9be3-c4c55416760e-utilities\") pod \"community-operators-snfp8\" (UID: \"ffb4c43d-964b-460c-9be3-c4c55416760e\") " pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.370349 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb4c43d-964b-460c-9be3-c4c55416760e-catalog-content\") pod \"community-operators-snfp8\" (UID: \"ffb4c43d-964b-460c-9be3-c4c55416760e\") " pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.472796 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb4c43d-964b-460c-9be3-c4c55416760e-catalog-content\") pod \"community-operators-snfp8\" (UID: \"ffb4c43d-964b-460c-9be3-c4c55416760e\") " pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.472931 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph4wm\" (UniqueName: \"kubernetes.io/projected/ffb4c43d-964b-460c-9be3-c4c55416760e-kube-api-access-ph4wm\") pod \"community-operators-snfp8\" (UID: \"ffb4c43d-964b-460c-9be3-c4c55416760e\") " pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.473016 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb4c43d-964b-460c-9be3-c4c55416760e-utilities\") pod \"community-operators-snfp8\" (UID: \"ffb4c43d-964b-460c-9be3-c4c55416760e\") " pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.473456 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb4c43d-964b-460c-9be3-c4c55416760e-utilities\") pod \"community-operators-snfp8\" (UID: \"ffb4c43d-964b-460c-9be3-c4c55416760e\") " pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.473677 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb4c43d-964b-460c-9be3-c4c55416760e-catalog-content\") pod \"community-operators-snfp8\" (UID: \"ffb4c43d-964b-460c-9be3-c4c55416760e\") " pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.500879 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph4wm\" (UniqueName: \"kubernetes.io/projected/ffb4c43d-964b-460c-9be3-c4c55416760e-kube-api-access-ph4wm\") pod 
\"community-operators-snfp8\" (UID: \"ffb4c43d-964b-460c-9be3-c4c55416760e\") " pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:07:51 crc kubenswrapper[4592]: I0929 18:07:51.555478 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:07:52 crc kubenswrapper[4592]: I0929 18:07:52.151289 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-snfp8"] Sep 29 18:07:52 crc kubenswrapper[4592]: W0929 18:07:52.162569 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podffb4c43d_964b_460c_9be3_c4c55416760e.slice/crio-07ae310aa5e43211e5091c8bd8b85e7afd202e0c7270de527d859f16b1934ea9 WatchSource:0}: Error finding container 07ae310aa5e43211e5091c8bd8b85e7afd202e0c7270de527d859f16b1934ea9: Status 404 returned error can't find the container with id 07ae310aa5e43211e5091c8bd8b85e7afd202e0c7270de527d859f16b1934ea9 Sep 29 18:07:52 crc kubenswrapper[4592]: I0929 18:07:52.198401 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-snfp8" event={"ID":"ffb4c43d-964b-460c-9be3-c4c55416760e","Type":"ContainerStarted","Data":"07ae310aa5e43211e5091c8bd8b85e7afd202e0c7270de527d859f16b1934ea9"} Sep 29 18:07:53 crc kubenswrapper[4592]: I0929 18:07:53.212116 4592 generic.go:334] "Generic (PLEG): container finished" podID="ffb4c43d-964b-460c-9be3-c4c55416760e" containerID="f4432dc5a971dc977bfbb7ed1a2596874cd910dfa5f3cf4c3d8696eac8f2b104" exitCode=0 Sep 29 18:07:53 crc kubenswrapper[4592]: I0929 18:07:53.212172 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-snfp8" event={"ID":"ffb4c43d-964b-460c-9be3-c4c55416760e","Type":"ContainerDied","Data":"f4432dc5a971dc977bfbb7ed1a2596874cd910dfa5f3cf4c3d8696eac8f2b104"} Sep 29 18:07:54 crc kubenswrapper[4592]: I0929 18:07:54.228082 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-snfp8" event={"ID":"ffb4c43d-964b-460c-9be3-c4c55416760e","Type":"ContainerStarted","Data":"ff866c29b25c859e7d29bcdfa311a0a2e1ace6f9324bded2a2d2669230f8ab80"} Sep 29 18:07:55 crc kubenswrapper[4592]: I0929 18:07:55.240038 4592 generic.go:334] "Generic (PLEG): container finished" podID="ffb4c43d-964b-460c-9be3-c4c55416760e" containerID="ff866c29b25c859e7d29bcdfa311a0a2e1ace6f9324bded2a2d2669230f8ab80" exitCode=0 Sep 29 18:07:55 crc kubenswrapper[4592]: I0929 18:07:55.240083 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-snfp8" event={"ID":"ffb4c43d-964b-460c-9be3-c4c55416760e","Type":"ContainerDied","Data":"ff866c29b25c859e7d29bcdfa311a0a2e1ace6f9324bded2a2d2669230f8ab80"} Sep 29 18:07:56 crc kubenswrapper[4592]: I0929 18:07:56.253805 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-snfp8" event={"ID":"ffb4c43d-964b-460c-9be3-c4c55416760e","Type":"ContainerStarted","Data":"4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529"} Sep 29 18:07:56 crc kubenswrapper[4592]: I0929 18:07:56.281757 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-snfp8" podStartSLOduration=2.7751900369999998 podStartE2EDuration="5.281741171s" podCreationTimestamp="2025-09-29 18:07:51 +0000 UTC" firstStartedPulling="2025-09-29 18:07:53.21674688 +0000 UTC 
m=+4603.364524571" lastFinishedPulling="2025-09-29 18:07:55.723298024 +0000 UTC m=+4605.871075705" observedRunningTime="2025-09-29 18:07:56.27911274 +0000 UTC m=+4606.426890451" watchObservedRunningTime="2025-09-29 18:07:56.281741171 +0000 UTC m=+4606.429518852" Sep 29 18:07:59 crc kubenswrapper[4592]: I0929 18:07:59.183592 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:07:59 crc kubenswrapper[4592]: E0929 18:07:59.185084 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:08:01 crc kubenswrapper[4592]: I0929 18:08:01.555784 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:08:01 crc kubenswrapper[4592]: I0929 18:08:01.556237 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:08:01 crc kubenswrapper[4592]: I0929 18:08:01.843236 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:08:02 crc kubenswrapper[4592]: I0929 18:08:02.328942 4592 generic.go:334] "Generic (PLEG): container finished" podID="3fa4f835-d593-46d4-a7c4-5569cbf29d8e" containerID="85d18ffe56f2c937e82d0186556ae8d2c638e2036a7b8b97452ae5caf4cae13a" exitCode=0 Sep 29 18:08:02 crc kubenswrapper[4592]: I0929 18:08:02.329012 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/crc-debug-w5rqw" event={"ID":"3fa4f835-d593-46d4-a7c4-5569cbf29d8e","Type":"ContainerDied","Data":"85d18ffe56f2c937e82d0186556ae8d2c638e2036a7b8b97452ae5caf4cae13a"} Sep 29 18:08:02 crc kubenswrapper[4592]: I0929 18:08:02.407938 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:08:02 crc kubenswrapper[4592]: I0929 18:08:02.460675 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-snfp8"] Sep 29 18:08:03 crc kubenswrapper[4592]: I0929 18:08:03.474731 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-c56gt/crc-debug-w5rqw" Sep 29 18:08:03 crc kubenswrapper[4592]: I0929 18:08:03.506112 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-c56gt/crc-debug-w5rqw"] Sep 29 18:08:03 crc kubenswrapper[4592]: I0929 18:08:03.513347 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-c56gt/crc-debug-w5rqw"] Sep 29 18:08:03 crc kubenswrapper[4592]: I0929 18:08:03.530064 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fa4f835-d593-46d4-a7c4-5569cbf29d8e-host\") pod \"3fa4f835-d593-46d4-a7c4-5569cbf29d8e\" (UID: \"3fa4f835-d593-46d4-a7c4-5569cbf29d8e\") " Sep 29 18:08:03 crc kubenswrapper[4592]: I0929 18:08:03.530354 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcmjz\" (UniqueName: \"kubernetes.io/projected/3fa4f835-d593-46d4-a7c4-5569cbf29d8e-kube-api-access-fcmjz\") pod \"3fa4f835-d593-46d4-a7c4-5569cbf29d8e\" (UID: \"3fa4f835-d593-46d4-a7c4-5569cbf29d8e\") " Sep 29 18:08:03 crc kubenswrapper[4592]: I0929 18:08:03.530366 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fa4f835-d593-46d4-a7c4-5569cbf29d8e-host" (OuterVolumeSpecName: "host") pod "3fa4f835-d593-46d4-a7c4-5569cbf29d8e" (UID: "3fa4f835-d593-46d4-a7c4-5569cbf29d8e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 18:08:03 crc kubenswrapper[4592]: I0929 18:08:03.531268 4592 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fa4f835-d593-46d4-a7c4-5569cbf29d8e-host\") on node \"crc\" DevicePath \"\"" Sep 29 18:08:03 crc kubenswrapper[4592]: I0929 18:08:03.536929 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fa4f835-d593-46d4-a7c4-5569cbf29d8e-kube-api-access-fcmjz" (OuterVolumeSpecName: "kube-api-access-fcmjz") pod "3fa4f835-d593-46d4-a7c4-5569cbf29d8e" (UID: "3fa4f835-d593-46d4-a7c4-5569cbf29d8e"). InnerVolumeSpecName "kube-api-access-fcmjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:08:03 crc kubenswrapper[4592]: I0929 18:08:03.634038 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcmjz\" (UniqueName: \"kubernetes.io/projected/3fa4f835-d593-46d4-a7c4-5569cbf29d8e-kube-api-access-fcmjz\") on node \"crc\" DevicePath \"\"" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.354384 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-snfp8" podUID="ffb4c43d-964b-460c-9be3-c4c55416760e" containerName="registry-server" containerID="cri-o://4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529" gracePeriod=2 Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.354725 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-c56gt/crc-debug-w5rqw" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.355368 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9f7b821f612e79550ba949faf9245682b1a6c3ca42aabbf68dec74acebe3b8b" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.739835 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-c56gt/crc-debug-4q95m"] Sep 29 18:08:04 crc kubenswrapper[4592]: E0929 18:08:04.740620 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fa4f835-d593-46d4-a7c4-5569cbf29d8e" containerName="container-00" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.740637 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fa4f835-d593-46d4-a7c4-5569cbf29d8e" containerName="container-00" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.740880 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fa4f835-d593-46d4-a7c4-5569cbf29d8e" containerName="container-00" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.741637 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-c56gt/crc-debug-4q95m" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.755259 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv6mr\" (UniqueName: \"kubernetes.io/projected/669d840d-6565-49f1-a811-e6aaee8f3d00-kube-api-access-rv6mr\") pod \"crc-debug-4q95m\" (UID: \"669d840d-6565-49f1-a811-e6aaee8f3d00\") " pod="openshift-must-gather-c56gt/crc-debug-4q95m" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.755330 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/669d840d-6565-49f1-a811-e6aaee8f3d00-host\") pod \"crc-debug-4q95m\" (UID: \"669d840d-6565-49f1-a811-e6aaee8f3d00\") " pod="openshift-must-gather-c56gt/crc-debug-4q95m" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.856651 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv6mr\" (UniqueName: \"kubernetes.io/projected/669d840d-6565-49f1-a811-e6aaee8f3d00-kube-api-access-rv6mr\") pod \"crc-debug-4q95m\" (UID: \"669d840d-6565-49f1-a811-e6aaee8f3d00\") " pod="openshift-must-gather-c56gt/crc-debug-4q95m" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.856726 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/669d840d-6565-49f1-a811-e6aaee8f3d00-host\") pod \"crc-debug-4q95m\" (UID: \"669d840d-6565-49f1-a811-e6aaee8f3d00\") " pod="openshift-must-gather-c56gt/crc-debug-4q95m" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.856897 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/669d840d-6565-49f1-a811-e6aaee8f3d00-host\") pod \"crc-debug-4q95m\" (UID: \"669d840d-6565-49f1-a811-e6aaee8f3d00\") " pod="openshift-must-gather-c56gt/crc-debug-4q95m" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.861909 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.891239 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv6mr\" (UniqueName: \"kubernetes.io/projected/669d840d-6565-49f1-a811-e6aaee8f3d00-kube-api-access-rv6mr\") pod \"crc-debug-4q95m\" (UID: \"669d840d-6565-49f1-a811-e6aaee8f3d00\") " pod="openshift-must-gather-c56gt/crc-debug-4q95m" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.958496 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ph4wm\" (UniqueName: \"kubernetes.io/projected/ffb4c43d-964b-460c-9be3-c4c55416760e-kube-api-access-ph4wm\") pod \"ffb4c43d-964b-460c-9be3-c4c55416760e\" (UID: \"ffb4c43d-964b-460c-9be3-c4c55416760e\") " Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.958701 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb4c43d-964b-460c-9be3-c4c55416760e-catalog-content\") pod \"ffb4c43d-964b-460c-9be3-c4c55416760e\" (UID: \"ffb4c43d-964b-460c-9be3-c4c55416760e\") " Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.958790 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb4c43d-964b-460c-9be3-c4c55416760e-utilities\") pod \"ffb4c43d-964b-460c-9be3-c4c55416760e\" (UID: \"ffb4c43d-964b-460c-9be3-c4c55416760e\") " Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.959772 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffb4c43d-964b-460c-9be3-c4c55416760e-utilities" (OuterVolumeSpecName: "utilities") pod "ffb4c43d-964b-460c-9be3-c4c55416760e" (UID: "ffb4c43d-964b-460c-9be3-c4c55416760e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:08:04 crc kubenswrapper[4592]: I0929 18:08:04.963413 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffb4c43d-964b-460c-9be3-c4c55416760e-kube-api-access-ph4wm" (OuterVolumeSpecName: "kube-api-access-ph4wm") pod "ffb4c43d-964b-460c-9be3-c4c55416760e" (UID: "ffb4c43d-964b-460c-9be3-c4c55416760e"). InnerVolumeSpecName "kube-api-access-ph4wm". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.004415 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffb4c43d-964b-460c-9be3-c4c55416760e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ffb4c43d-964b-460c-9be3-c4c55416760e" (UID: "ffb4c43d-964b-460c-9be3-c4c55416760e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.059911 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb4c43d-964b-460c-9be3-c4c55416760e-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.059945 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb4c43d-964b-460c-9be3-c4c55416760e-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.059955 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ph4wm\" (UniqueName: \"kubernetes.io/projected/ffb4c43d-964b-460c-9be3-c4c55416760e-kube-api-access-ph4wm\") on node \"crc\" DevicePath \"\"" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.067930 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-c56gt/crc-debug-4q95m" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.207465 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fa4f835-d593-46d4-a7c4-5569cbf29d8e" path="/var/lib/kubelet/pods/3fa4f835-d593-46d4-a7c4-5569cbf29d8e/volumes" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.368928 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/crc-debug-4q95m" event={"ID":"669d840d-6565-49f1-a811-e6aaee8f3d00","Type":"ContainerStarted","Data":"530abb4e9bf38923cb875562d9c129dde43bc8c7c151f02248fd3501c7cf60c6"} Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.369003 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/crc-debug-4q95m" event={"ID":"669d840d-6565-49f1-a811-e6aaee8f3d00","Type":"ContainerStarted","Data":"3ce2e8622053ceaa5e3c16c4d278f22d0662735bb85e5a60347d3148cacc9b91"} Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.373933 4592 generic.go:334] "Generic (PLEG): container finished" podID="ffb4c43d-964b-460c-9be3-c4c55416760e" containerID="4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529" exitCode=0 Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.373987 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-snfp8" event={"ID":"ffb4c43d-964b-460c-9be3-c4c55416760e","Type":"ContainerDied","Data":"4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529"} Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.374076 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-snfp8" event={"ID":"ffb4c43d-964b-460c-9be3-c4c55416760e","Type":"ContainerDied","Data":"07ae310aa5e43211e5091c8bd8b85e7afd202e0c7270de527d859f16b1934ea9"} Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.374107 4592 scope.go:117] "RemoveContainer" containerID="4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.374014 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-snfp8" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.390669 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-c56gt/crc-debug-4q95m" podStartSLOduration=1.3906396380000001 podStartE2EDuration="1.390639638s" podCreationTimestamp="2025-09-29 18:08:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:08:05.388384827 +0000 UTC m=+4615.536162508" watchObservedRunningTime="2025-09-29 18:08:05.390639638 +0000 UTC m=+4615.538417359" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.408666 4592 scope.go:117] "RemoveContainer" containerID="ff866c29b25c859e7d29bcdfa311a0a2e1ace6f9324bded2a2d2669230f8ab80" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.424966 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-snfp8"] Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.435168 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-snfp8"] Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.447383 4592 scope.go:117] "RemoveContainer" containerID="f4432dc5a971dc977bfbb7ed1a2596874cd910dfa5f3cf4c3d8696eac8f2b104" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.478108 4592 scope.go:117] "RemoveContainer" containerID="4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529" Sep 29 18:08:05 crc kubenswrapper[4592]: E0929 18:08:05.479664 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529\": container with ID starting with 4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529 not found: ID does not exist" containerID="4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.479711 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529"} err="failed to get container status \"4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529\": rpc error: code = NotFound desc = could not find container \"4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529\": container with ID starting with 4c9af91ab25dab2cc2203fc06b04b1759d8755e013b1e04282f61119389ff529 not found: ID does not exist" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.479739 4592 scope.go:117] "RemoveContainer" containerID="ff866c29b25c859e7d29bcdfa311a0a2e1ace6f9324bded2a2d2669230f8ab80" Sep 29 18:08:05 crc kubenswrapper[4592]: E0929 18:08:05.480262 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff866c29b25c859e7d29bcdfa311a0a2e1ace6f9324bded2a2d2669230f8ab80\": container with ID starting with ff866c29b25c859e7d29bcdfa311a0a2e1ace6f9324bded2a2d2669230f8ab80 not found: ID does not exist" containerID="ff866c29b25c859e7d29bcdfa311a0a2e1ace6f9324bded2a2d2669230f8ab80" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.480299 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff866c29b25c859e7d29bcdfa311a0a2e1ace6f9324bded2a2d2669230f8ab80"} err="failed to get container status 
\"ff866c29b25c859e7d29bcdfa311a0a2e1ace6f9324bded2a2d2669230f8ab80\": rpc error: code = NotFound desc = could not find container \"ff866c29b25c859e7d29bcdfa311a0a2e1ace6f9324bded2a2d2669230f8ab80\": container with ID starting with ff866c29b25c859e7d29bcdfa311a0a2e1ace6f9324bded2a2d2669230f8ab80 not found: ID does not exist" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.480319 4592 scope.go:117] "RemoveContainer" containerID="f4432dc5a971dc977bfbb7ed1a2596874cd910dfa5f3cf4c3d8696eac8f2b104" Sep 29 18:08:05 crc kubenswrapper[4592]: E0929 18:08:05.481478 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4432dc5a971dc977bfbb7ed1a2596874cd910dfa5f3cf4c3d8696eac8f2b104\": container with ID starting with f4432dc5a971dc977bfbb7ed1a2596874cd910dfa5f3cf4c3d8696eac8f2b104 not found: ID does not exist" containerID="f4432dc5a971dc977bfbb7ed1a2596874cd910dfa5f3cf4c3d8696eac8f2b104" Sep 29 18:08:05 crc kubenswrapper[4592]: I0929 18:08:05.481601 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4432dc5a971dc977bfbb7ed1a2596874cd910dfa5f3cf4c3d8696eac8f2b104"} err="failed to get container status \"f4432dc5a971dc977bfbb7ed1a2596874cd910dfa5f3cf4c3d8696eac8f2b104\": rpc error: code = NotFound desc = could not find container \"f4432dc5a971dc977bfbb7ed1a2596874cd910dfa5f3cf4c3d8696eac8f2b104\": container with ID starting with f4432dc5a971dc977bfbb7ed1a2596874cd910dfa5f3cf4c3d8696eac8f2b104 not found: ID does not exist" Sep 29 18:08:06 crc kubenswrapper[4592]: I0929 18:08:06.385410 4592 generic.go:334] "Generic (PLEG): container finished" podID="669d840d-6565-49f1-a811-e6aaee8f3d00" containerID="530abb4e9bf38923cb875562d9c129dde43bc8c7c151f02248fd3501c7cf60c6" exitCode=0 Sep 29 18:08:06 crc kubenswrapper[4592]: I0929 18:08:06.385497 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/crc-debug-4q95m" event={"ID":"669d840d-6565-49f1-a811-e6aaee8f3d00","Type":"ContainerDied","Data":"530abb4e9bf38923cb875562d9c129dde43bc8c7c151f02248fd3501c7cf60c6"} Sep 29 18:08:07 crc kubenswrapper[4592]: I0929 18:08:07.199397 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffb4c43d-964b-460c-9be3-c4c55416760e" path="/var/lib/kubelet/pods/ffb4c43d-964b-460c-9be3-c4c55416760e/volumes" Sep 29 18:08:07 crc kubenswrapper[4592]: I0929 18:08:07.488684 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-c56gt/crc-debug-4q95m" Sep 29 18:08:07 crc kubenswrapper[4592]: I0929 18:08:07.605103 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/669d840d-6565-49f1-a811-e6aaee8f3d00-host\") pod \"669d840d-6565-49f1-a811-e6aaee8f3d00\" (UID: \"669d840d-6565-49f1-a811-e6aaee8f3d00\") " Sep 29 18:08:07 crc kubenswrapper[4592]: I0929 18:08:07.605175 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/669d840d-6565-49f1-a811-e6aaee8f3d00-host" (OuterVolumeSpecName: "host") pod "669d840d-6565-49f1-a811-e6aaee8f3d00" (UID: "669d840d-6565-49f1-a811-e6aaee8f3d00"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 18:08:07 crc kubenswrapper[4592]: I0929 18:08:07.605224 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv6mr\" (UniqueName: \"kubernetes.io/projected/669d840d-6565-49f1-a811-e6aaee8f3d00-kube-api-access-rv6mr\") pod \"669d840d-6565-49f1-a811-e6aaee8f3d00\" (UID: \"669d840d-6565-49f1-a811-e6aaee8f3d00\") " Sep 29 18:08:07 crc kubenswrapper[4592]: I0929 18:08:07.605696 4592 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/669d840d-6565-49f1-a811-e6aaee8f3d00-host\") on node \"crc\" DevicePath \"\"" Sep 29 18:08:07 crc kubenswrapper[4592]: I0929 18:08:07.612363 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/669d840d-6565-49f1-a811-e6aaee8f3d00-kube-api-access-rv6mr" (OuterVolumeSpecName: "kube-api-access-rv6mr") pod "669d840d-6565-49f1-a811-e6aaee8f3d00" (UID: "669d840d-6565-49f1-a811-e6aaee8f3d00"). InnerVolumeSpecName "kube-api-access-rv6mr". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:08:07 crc kubenswrapper[4592]: I0929 18:08:07.706811 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv6mr\" (UniqueName: \"kubernetes.io/projected/669d840d-6565-49f1-a811-e6aaee8f3d00-kube-api-access-rv6mr\") on node \"crc\" DevicePath \"\"" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.404722 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/crc-debug-4q95m" event={"ID":"669d840d-6565-49f1-a811-e6aaee8f3d00","Type":"ContainerDied","Data":"3ce2e8622053ceaa5e3c16c4d278f22d0662735bb85e5a60347d3148cacc9b91"} Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.404773 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ce2e8622053ceaa5e3c16c4d278f22d0662735bb85e5a60347d3148cacc9b91" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.404835 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-c56gt/crc-debug-4q95m" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.628954 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ftrlk"] Sep 29 18:08:08 crc kubenswrapper[4592]: E0929 18:08:08.629785 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb4c43d-964b-460c-9be3-c4c55416760e" containerName="extract-utilities" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.629801 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb4c43d-964b-460c-9be3-c4c55416760e" containerName="extract-utilities" Sep 29 18:08:08 crc kubenswrapper[4592]: E0929 18:08:08.629847 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb4c43d-964b-460c-9be3-c4c55416760e" containerName="registry-server" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.629854 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb4c43d-964b-460c-9be3-c4c55416760e" containerName="registry-server" Sep 29 18:08:08 crc kubenswrapper[4592]: E0929 18:08:08.629870 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb4c43d-964b-460c-9be3-c4c55416760e" containerName="extract-content" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.629877 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb4c43d-964b-460c-9be3-c4c55416760e" containerName="extract-content" Sep 29 18:08:08 crc kubenswrapper[4592]: E0929 18:08:08.629916 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="669d840d-6565-49f1-a811-e6aaee8f3d00" containerName="container-00" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.629923 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="669d840d-6565-49f1-a811-e6aaee8f3d00" containerName="container-00" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.630871 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="669d840d-6565-49f1-a811-e6aaee8f3d00" containerName="container-00" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.630929 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffb4c43d-964b-460c-9be3-c4c55416760e" containerName="registry-server" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.633497 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.670517 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ftrlk"] Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.822960 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e757d89d-6cdb-4837-bc76-dcb37a064250-catalog-content\") pod \"redhat-operators-ftrlk\" (UID: \"e757d89d-6cdb-4837-bc76-dcb37a064250\") " pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.823072 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6p8m\" (UniqueName: \"kubernetes.io/projected/e757d89d-6cdb-4837-bc76-dcb37a064250-kube-api-access-c6p8m\") pod \"redhat-operators-ftrlk\" (UID: \"e757d89d-6cdb-4837-bc76-dcb37a064250\") " pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.823099 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e757d89d-6cdb-4837-bc76-dcb37a064250-utilities\") pod \"redhat-operators-ftrlk\" (UID: \"e757d89d-6cdb-4837-bc76-dcb37a064250\") " pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.924118 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e757d89d-6cdb-4837-bc76-dcb37a064250-catalog-content\") pod \"redhat-operators-ftrlk\" (UID: \"e757d89d-6cdb-4837-bc76-dcb37a064250\") " pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.924423 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6p8m\" (UniqueName: \"kubernetes.io/projected/e757d89d-6cdb-4837-bc76-dcb37a064250-kube-api-access-c6p8m\") pod \"redhat-operators-ftrlk\" (UID: \"e757d89d-6cdb-4837-bc76-dcb37a064250\") " pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.924505 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e757d89d-6cdb-4837-bc76-dcb37a064250-utilities\") pod \"redhat-operators-ftrlk\" (UID: \"e757d89d-6cdb-4837-bc76-dcb37a064250\") " pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.924934 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e757d89d-6cdb-4837-bc76-dcb37a064250-catalog-content\") pod \"redhat-operators-ftrlk\" (UID: \"e757d89d-6cdb-4837-bc76-dcb37a064250\") " pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.925113 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e757d89d-6cdb-4837-bc76-dcb37a064250-utilities\") pod \"redhat-operators-ftrlk\" (UID: \"e757d89d-6cdb-4837-bc76-dcb37a064250\") " pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.942661 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-c6p8m\" (UniqueName: \"kubernetes.io/projected/e757d89d-6cdb-4837-bc76-dcb37a064250-kube-api-access-c6p8m\") pod \"redhat-operators-ftrlk\" (UID: \"e757d89d-6cdb-4837-bc76-dcb37a064250\") " pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:08 crc kubenswrapper[4592]: I0929 18:08:08.958132 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:09 crc kubenswrapper[4592]: I0929 18:08:09.481818 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ftrlk"] Sep 29 18:08:10 crc kubenswrapper[4592]: I0929 18:08:10.422703 4592 generic.go:334] "Generic (PLEG): container finished" podID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerID="96744393562deaf4a3cc0f16557fa5410e9c65551cf3e5b75f51e49aa3dc1b6c" exitCode=0 Sep 29 18:08:10 crc kubenswrapper[4592]: I0929 18:08:10.422738 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftrlk" event={"ID":"e757d89d-6cdb-4837-bc76-dcb37a064250","Type":"ContainerDied","Data":"96744393562deaf4a3cc0f16557fa5410e9c65551cf3e5b75f51e49aa3dc1b6c"} Sep 29 18:08:10 crc kubenswrapper[4592]: I0929 18:08:10.425319 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftrlk" event={"ID":"e757d89d-6cdb-4837-bc76-dcb37a064250","Type":"ContainerStarted","Data":"8ab171f69c81877219db9938162001c1eb25048781b3ac4f26ff651d537542f9"} Sep 29 18:08:11 crc kubenswrapper[4592]: I0929 18:08:11.434811 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftrlk" event={"ID":"e757d89d-6cdb-4837-bc76-dcb37a064250","Type":"ContainerStarted","Data":"2426ca9a7d9669ae6c69ee30bfa6ded36f1007ac9f3b2019545b7e6e2ea8824c"} Sep 29 18:08:13 crc kubenswrapper[4592]: I0929 18:08:13.182843 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:08:13 crc kubenswrapper[4592]: E0929 18:08:13.183296 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:08:13 crc kubenswrapper[4592]: I0929 18:08:13.421758 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-c56gt/crc-debug-4q95m"] Sep 29 18:08:13 crc kubenswrapper[4592]: I0929 18:08:13.430587 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-c56gt/crc-debug-4q95m"] Sep 29 18:08:14 crc kubenswrapper[4592]: I0929 18:08:14.673041 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-c56gt/crc-debug-7g98b"] Sep 29 18:08:14 crc kubenswrapper[4592]: I0929 18:08:14.674176 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-c56gt/crc-debug-7g98b" Sep 29 18:08:14 crc kubenswrapper[4592]: I0929 18:08:14.734418 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9d76\" (UniqueName: \"kubernetes.io/projected/57d0bc9a-d88f-4502-9390-910f2e95564d-kube-api-access-w9d76\") pod \"crc-debug-7g98b\" (UID: \"57d0bc9a-d88f-4502-9390-910f2e95564d\") " pod="openshift-must-gather-c56gt/crc-debug-7g98b" Sep 29 18:08:14 crc kubenswrapper[4592]: I0929 18:08:14.734597 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/57d0bc9a-d88f-4502-9390-910f2e95564d-host\") pod \"crc-debug-7g98b\" (UID: \"57d0bc9a-d88f-4502-9390-910f2e95564d\") " pod="openshift-must-gather-c56gt/crc-debug-7g98b" Sep 29 18:08:14 crc kubenswrapper[4592]: I0929 18:08:14.836786 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9d76\" (UniqueName: \"kubernetes.io/projected/57d0bc9a-d88f-4502-9390-910f2e95564d-kube-api-access-w9d76\") pod \"crc-debug-7g98b\" (UID: \"57d0bc9a-d88f-4502-9390-910f2e95564d\") " pod="openshift-must-gather-c56gt/crc-debug-7g98b" Sep 29 18:08:14 crc kubenswrapper[4592]: I0929 18:08:14.836909 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/57d0bc9a-d88f-4502-9390-910f2e95564d-host\") pod \"crc-debug-7g98b\" (UID: \"57d0bc9a-d88f-4502-9390-910f2e95564d\") " pod="openshift-must-gather-c56gt/crc-debug-7g98b" Sep 29 18:08:14 crc kubenswrapper[4592]: I0929 18:08:14.837122 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/57d0bc9a-d88f-4502-9390-910f2e95564d-host\") pod \"crc-debug-7g98b\" (UID: \"57d0bc9a-d88f-4502-9390-910f2e95564d\") " pod="openshift-must-gather-c56gt/crc-debug-7g98b" Sep 29 18:08:14 crc kubenswrapper[4592]: I0929 18:08:14.872191 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9d76\" (UniqueName: \"kubernetes.io/projected/57d0bc9a-d88f-4502-9390-910f2e95564d-kube-api-access-w9d76\") pod \"crc-debug-7g98b\" (UID: \"57d0bc9a-d88f-4502-9390-910f2e95564d\") " pod="openshift-must-gather-c56gt/crc-debug-7g98b" Sep 29 18:08:15 crc kubenswrapper[4592]: I0929 18:08:15.006636 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-c56gt/crc-debug-7g98b" Sep 29 18:08:15 crc kubenswrapper[4592]: W0929 18:08:15.045677 4592 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57d0bc9a_d88f_4502_9390_910f2e95564d.slice/crio-5364fbdbc4eaaf8168891a15976a4f55708f49219da124fdd27f7cc685e8e58f WatchSource:0}: Error finding container 5364fbdbc4eaaf8168891a15976a4f55708f49219da124fdd27f7cc685e8e58f: Status 404 returned error can't find the container with id 5364fbdbc4eaaf8168891a15976a4f55708f49219da124fdd27f7cc685e8e58f Sep 29 18:08:15 crc kubenswrapper[4592]: I0929 18:08:15.200477 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="669d840d-6565-49f1-a811-e6aaee8f3d00" path="/var/lib/kubelet/pods/669d840d-6565-49f1-a811-e6aaee8f3d00/volumes" Sep 29 18:08:15 crc kubenswrapper[4592]: I0929 18:08:15.468678 4592 generic.go:334] "Generic (PLEG): container finished" podID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerID="2426ca9a7d9669ae6c69ee30bfa6ded36f1007ac9f3b2019545b7e6e2ea8824c" exitCode=0 Sep 29 18:08:15 crc kubenswrapper[4592]: I0929 18:08:15.468733 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftrlk" event={"ID":"e757d89d-6cdb-4837-bc76-dcb37a064250","Type":"ContainerDied","Data":"2426ca9a7d9669ae6c69ee30bfa6ded36f1007ac9f3b2019545b7e6e2ea8824c"} Sep 29 18:08:15 crc kubenswrapper[4592]: I0929 18:08:15.470830 4592 generic.go:334] "Generic (PLEG): container finished" podID="57d0bc9a-d88f-4502-9390-910f2e95564d" containerID="d39634ee6a3fb1ef1fe9d912f6a9432b381b34ca09033b62f3a5a338a14303ae" exitCode=0 Sep 29 18:08:15 crc kubenswrapper[4592]: I0929 18:08:15.470894 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/crc-debug-7g98b" event={"ID":"57d0bc9a-d88f-4502-9390-910f2e95564d","Type":"ContainerDied","Data":"d39634ee6a3fb1ef1fe9d912f6a9432b381b34ca09033b62f3a5a338a14303ae"} Sep 29 18:08:15 crc kubenswrapper[4592]: I0929 18:08:15.470919 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/crc-debug-7g98b" event={"ID":"57d0bc9a-d88f-4502-9390-910f2e95564d","Type":"ContainerStarted","Data":"5364fbdbc4eaaf8168891a15976a4f55708f49219da124fdd27f7cc685e8e58f"} Sep 29 18:08:15 crc kubenswrapper[4592]: I0929 18:08:15.533927 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-c56gt/crc-debug-7g98b"] Sep 29 18:08:15 crc kubenswrapper[4592]: I0929 18:08:15.540627 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-c56gt/crc-debug-7g98b"] Sep 29 18:08:16 crc kubenswrapper[4592]: I0929 18:08:16.480116 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftrlk" event={"ID":"e757d89d-6cdb-4837-bc76-dcb37a064250","Type":"ContainerStarted","Data":"765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0"} Sep 29 18:08:16 crc kubenswrapper[4592]: I0929 18:08:16.515274 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ftrlk" podStartSLOduration=3.023105877 podStartE2EDuration="8.515254746s" podCreationTimestamp="2025-09-29 18:08:08 +0000 UTC" firstStartedPulling="2025-09-29 18:08:10.427270558 +0000 UTC m=+4620.575048239" lastFinishedPulling="2025-09-29 18:08:15.919419387 +0000 UTC m=+4626.067197108" observedRunningTime="2025-09-29 18:08:16.510374982 +0000 UTC m=+4626.658152683" 
watchObservedRunningTime="2025-09-29 18:08:16.515254746 +0000 UTC m=+4626.663032427" Sep 29 18:08:16 crc kubenswrapper[4592]: I0929 18:08:16.589804 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-c56gt/crc-debug-7g98b" Sep 29 18:08:16 crc kubenswrapper[4592]: I0929 18:08:16.772477 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/57d0bc9a-d88f-4502-9390-910f2e95564d-host\") pod \"57d0bc9a-d88f-4502-9390-910f2e95564d\" (UID: \"57d0bc9a-d88f-4502-9390-910f2e95564d\") " Sep 29 18:08:16 crc kubenswrapper[4592]: I0929 18:08:16.772545 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9d76\" (UniqueName: \"kubernetes.io/projected/57d0bc9a-d88f-4502-9390-910f2e95564d-kube-api-access-w9d76\") pod \"57d0bc9a-d88f-4502-9390-910f2e95564d\" (UID: \"57d0bc9a-d88f-4502-9390-910f2e95564d\") " Sep 29 18:08:16 crc kubenswrapper[4592]: I0929 18:08:16.772588 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/57d0bc9a-d88f-4502-9390-910f2e95564d-host" (OuterVolumeSpecName: "host") pod "57d0bc9a-d88f-4502-9390-910f2e95564d" (UID: "57d0bc9a-d88f-4502-9390-910f2e95564d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 18:08:16 crc kubenswrapper[4592]: I0929 18:08:16.773195 4592 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/57d0bc9a-d88f-4502-9390-910f2e95564d-host\") on node \"crc\" DevicePath \"\"" Sep 29 18:08:16 crc kubenswrapper[4592]: I0929 18:08:16.787358 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57d0bc9a-d88f-4502-9390-910f2e95564d-kube-api-access-w9d76" (OuterVolumeSpecName: "kube-api-access-w9d76") pod "57d0bc9a-d88f-4502-9390-910f2e95564d" (UID: "57d0bc9a-d88f-4502-9390-910f2e95564d"). InnerVolumeSpecName "kube-api-access-w9d76". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:08:16 crc kubenswrapper[4592]: I0929 18:08:16.874595 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9d76\" (UniqueName: \"kubernetes.io/projected/57d0bc9a-d88f-4502-9390-910f2e95564d-kube-api-access-w9d76\") on node \"crc\" DevicePath \"\"" Sep 29 18:08:17 crc kubenswrapper[4592]: I0929 18:08:17.198287 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57d0bc9a-d88f-4502-9390-910f2e95564d" path="/var/lib/kubelet/pods/57d0bc9a-d88f-4502-9390-910f2e95564d/volumes" Sep 29 18:08:17 crc kubenswrapper[4592]: I0929 18:08:17.494675 4592 scope.go:117] "RemoveContainer" containerID="d39634ee6a3fb1ef1fe9d912f6a9432b381b34ca09033b62f3a5a338a14303ae" Sep 29 18:08:17 crc kubenswrapper[4592]: I0929 18:08:17.494722 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-c56gt/crc-debug-7g98b" Sep 29 18:08:17 crc kubenswrapper[4592]: I0929 18:08:17.604273 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/util/0.log" Sep 29 18:08:17 crc kubenswrapper[4592]: I0929 18:08:17.799518 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/util/0.log" Sep 29 18:08:17 crc kubenswrapper[4592]: I0929 18:08:17.898631 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/pull/0.log" Sep 29 18:08:17 crc kubenswrapper[4592]: I0929 18:08:17.922342 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/pull/0.log" Sep 29 18:08:18 crc kubenswrapper[4592]: I0929 18:08:18.106724 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/util/0.log" Sep 29 18:08:18 crc kubenswrapper[4592]: I0929 18:08:18.138637 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/pull/0.log" Sep 29 18:08:18 crc kubenswrapper[4592]: I0929 18:08:18.307489 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/extract/0.log" Sep 29 18:08:18 crc kubenswrapper[4592]: I0929 18:08:18.415775 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-55xvk_7ef58432-073e-43a5-bc36-38cb3611b118/kube-rbac-proxy/0.log" Sep 29 18:08:18 crc kubenswrapper[4592]: I0929 18:08:18.456208 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-55xvk_7ef58432-073e-43a5-bc36-38cb3611b118/manager/0.log" Sep 29 18:08:18 crc kubenswrapper[4592]: I0929 18:08:18.635068 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-5p559_de451eb0-13ae-4fab-a6f3-3cc8fb77566f/kube-rbac-proxy/0.log" Sep 29 18:08:18 crc kubenswrapper[4592]: I0929 18:08:18.779688 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-5p559_de451eb0-13ae-4fab-a6f3-3cc8fb77566f/manager/0.log" Sep 29 18:08:18 crc kubenswrapper[4592]: I0929 18:08:18.807399 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-bj7fn_ba256bd8-c14c-458e-b919-2feedb3a0c46/kube-rbac-proxy/0.log" Sep 29 18:08:18 crc kubenswrapper[4592]: I0929 18:08:18.922002 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-bj7fn_ba256bd8-c14c-458e-b919-2feedb3a0c46/manager/0.log" Sep 29 18:08:18 crc kubenswrapper[4592]: I0929 18:08:18.958524 4592 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:18 crc kubenswrapper[4592]: I0929 18:08:18.958571 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:19 crc kubenswrapper[4592]: I0929 18:08:19.013372 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-k969c_f8504fb5-9c3b-4b51-bf22-31c6bcdacad4/kube-rbac-proxy/0.log" Sep 29 18:08:19 crc kubenswrapper[4592]: I0929 18:08:19.170263 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-f8v56_9c565c72-206a-42a7-943d-c55fd9065e5f/kube-rbac-proxy/0.log" Sep 29 18:08:19 crc kubenswrapper[4592]: I0929 18:08:19.176112 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-k969c_f8504fb5-9c3b-4b51-bf22-31c6bcdacad4/manager/0.log" Sep 29 18:08:19 crc kubenswrapper[4592]: I0929 18:08:19.222189 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-f8v56_9c565c72-206a-42a7-943d-c55fd9065e5f/manager/0.log" Sep 29 18:08:19 crc kubenswrapper[4592]: I0929 18:08:19.410395 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-p4r66_1d74dab2-fe04-4218-8b91-4b958b0ad39d/kube-rbac-proxy/0.log" Sep 29 18:08:19 crc kubenswrapper[4592]: I0929 18:08:19.465529 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-p4r66_1d74dab2-fe04-4218-8b91-4b958b0ad39d/manager/0.log" Sep 29 18:08:19 crc kubenswrapper[4592]: I0929 18:08:19.766552 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-b8fbf_a4e61264-26ad-4012-be6c-4d6596b4ab27/kube-rbac-proxy/0.log" Sep 29 18:08:19 crc kubenswrapper[4592]: I0929 18:08:19.835551 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-b8fbf_a4e61264-26ad-4012-be6c-4d6596b4ab27/manager/0.log" Sep 29 18:08:19 crc kubenswrapper[4592]: I0929 18:08:19.851303 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-kjsck_a4b81165-b69a-40fa-b875-6d138351d6e6/kube-rbac-proxy/0.log" Sep 29 18:08:20 crc kubenswrapper[4592]: I0929 18:08:20.007299 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-kjsck_a4b81165-b69a-40fa-b875-6d138351d6e6/manager/0.log" Sep 29 18:08:20 crc kubenswrapper[4592]: I0929 18:08:20.013511 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ftrlk" podUID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerName="registry-server" probeResult="failure" output=< Sep 29 18:08:20 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s Sep 29 18:08:20 crc kubenswrapper[4592]: > Sep 29 18:08:20 crc kubenswrapper[4592]: I0929 18:08:20.134703 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-9n2d2_dff5de8b-2910-4e5a-a80a-089c649039cd/kube-rbac-proxy/0.log" Sep 29 18:08:20 crc kubenswrapper[4592]: I0929 18:08:20.253843 
4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-9n2d2_dff5de8b-2910-4e5a-a80a-089c649039cd/manager/0.log" Sep 29 18:08:20 crc kubenswrapper[4592]: I0929 18:08:20.392046 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-cffzv_bfa2f914-2596-49e6-bb75-760663a69813/kube-rbac-proxy/0.log" Sep 29 18:08:20 crc kubenswrapper[4592]: I0929 18:08:20.509288 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-cffzv_bfa2f914-2596-49e6-bb75-760663a69813/manager/0.log" Sep 29 18:08:20 crc kubenswrapper[4592]: I0929 18:08:20.680496 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-ssmqx_6bf183ea-90d6-4aff-9e61-d4cc3692fe08/kube-rbac-proxy/0.log" Sep 29 18:08:20 crc kubenswrapper[4592]: I0929 18:08:20.753658 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-ssmqx_6bf183ea-90d6-4aff-9e61-d4cc3692fe08/manager/0.log" Sep 29 18:08:20 crc kubenswrapper[4592]: I0929 18:08:20.888046 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-7sgxz_fd9f041b-9fd6-4d50-bc82-35fd86eea539/kube-rbac-proxy/0.log" Sep 29 18:08:21 crc kubenswrapper[4592]: I0929 18:08:21.030786 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-7sgxz_fd9f041b-9fd6-4d50-bc82-35fd86eea539/manager/0.log" Sep 29 18:08:21 crc kubenswrapper[4592]: I0929 18:08:21.068239 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-hg6b2_5746404b-3a0f-4851-9de9-28e4e7ef8f1f/kube-rbac-proxy/0.log" Sep 29 18:08:21 crc kubenswrapper[4592]: I0929 18:08:21.320762 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-hg6b2_5746404b-3a0f-4851-9de9-28e4e7ef8f1f/manager/0.log" Sep 29 18:08:21 crc kubenswrapper[4592]: I0929 18:08:21.363570 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-fc5kq_87bb1f2b-bc93-4b10-aa27-b8efd9ba669a/kube-rbac-proxy/0.log" Sep 29 18:08:21 crc kubenswrapper[4592]: I0929 18:08:21.382424 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-fc5kq_87bb1f2b-bc93-4b10-aa27-b8efd9ba669a/manager/0.log" Sep 29 18:08:21 crc kubenswrapper[4592]: I0929 18:08:21.564256 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-fqfbf_965c0641-f6e8-44e3-a8a1-32028665b9e2/kube-rbac-proxy/0.log" Sep 29 18:08:21 crc kubenswrapper[4592]: I0929 18:08:21.665045 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-fqfbf_965c0641-f6e8-44e3-a8a1-32028665b9e2/manager/0.log" Sep 29 18:08:21 crc kubenswrapper[4592]: I0929 18:08:21.668322 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7b9c4c58f5-fdd5n_8da8ce1f-60e7-4381-975e-daf9c5225b10/kube-rbac-proxy/0.log" Sep 29 18:08:21 crc 
kubenswrapper[4592]: I0929 18:08:21.866685 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7bf7677558-jj6jr_83cbe230-dcbc-4c90-befd-35f5082eaba6/kube-rbac-proxy/0.log" Sep 29 18:08:22 crc kubenswrapper[4592]: I0929 18:08:22.153833 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7bf7677558-jj6jr_83cbe230-dcbc-4c90-befd-35f5082eaba6/operator/0.log" Sep 29 18:08:22 crc kubenswrapper[4592]: I0929 18:08:22.256899 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-bc9lr_99fc0436-2ce9-4df7-ad2b-4ddb6dff9983/registry-server/0.log" Sep 29 18:08:22 crc kubenswrapper[4592]: I0929 18:08:22.431243 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-l97v2_bb38bf88-f05d-4e0e-8923-66b2097e247c/kube-rbac-proxy/0.log" Sep 29 18:08:22 crc kubenswrapper[4592]: I0929 18:08:22.470924 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-l97v2_bb38bf88-f05d-4e0e-8923-66b2097e247c/manager/0.log" Sep 29 18:08:22 crc kubenswrapper[4592]: I0929 18:08:22.518666 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-j5st9_1100f7ed-81d3-49d8-9852-867de93e273b/kube-rbac-proxy/0.log" Sep 29 18:08:22 crc kubenswrapper[4592]: I0929 18:08:22.695480 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-j5st9_1100f7ed-81d3-49d8-9852-867de93e273b/manager/0.log" Sep 29 18:08:22 crc kubenswrapper[4592]: I0929 18:08:22.787874 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-2pcjb_a3543654-318a-48ed-8109-a76d758b231d/operator/0.log" Sep 29 18:08:22 crc kubenswrapper[4592]: I0929 18:08:22.929956 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7b9c4c58f5-fdd5n_8da8ce1f-60e7-4381-975e-daf9c5225b10/manager/0.log" Sep 29 18:08:23 crc kubenswrapper[4592]: I0929 18:08:23.000493 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-pfzkm_9974276a-24ba-4ca1-9c70-f85e17e9c10c/kube-rbac-proxy/0.log" Sep 29 18:08:23 crc kubenswrapper[4592]: I0929 18:08:23.055463 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-pfzkm_9974276a-24ba-4ca1-9c70-f85e17e9c10c/manager/0.log" Sep 29 18:08:23 crc kubenswrapper[4592]: I0929 18:08:23.123340 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-77pht_5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4/kube-rbac-proxy/0.log" Sep 29 18:08:23 crc kubenswrapper[4592]: I0929 18:08:23.237257 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-77pht_5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4/manager/0.log" Sep 29 18:08:23 crc kubenswrapper[4592]: I0929 18:08:23.325073 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-mxl29_33a1eea4-82d2-438a-a844-6539c3016172/kube-rbac-proxy/0.log" Sep 29 18:08:23 crc 
kubenswrapper[4592]: I0929 18:08:23.435129 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-mxl29_33a1eea4-82d2-438a-a844-6539c3016172/manager/0.log" Sep 29 18:08:23 crc kubenswrapper[4592]: I0929 18:08:23.439155 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-mqwzl_50b8b2f8-551f-4379-84b6-5b217fa8b50c/manager/0.log" Sep 29 18:08:23 crc kubenswrapper[4592]: I0929 18:08:23.468926 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-mqwzl_50b8b2f8-551f-4379-84b6-5b217fa8b50c/kube-rbac-proxy/0.log" Sep 29 18:08:25 crc kubenswrapper[4592]: I0929 18:08:25.186072 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:08:25 crc kubenswrapper[4592]: E0929 18:08:25.186979 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:08:30 crc kubenswrapper[4592]: I0929 18:08:30.004866 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ftrlk" podUID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerName="registry-server" probeResult="failure" output=< Sep 29 18:08:30 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s Sep 29 18:08:30 crc kubenswrapper[4592]: > Sep 29 18:08:39 crc kubenswrapper[4592]: I0929 18:08:39.017955 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:39 crc kubenswrapper[4592]: I0929 18:08:39.075221 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:39 crc kubenswrapper[4592]: I0929 18:08:39.837530 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ftrlk"] Sep 29 18:08:40 crc kubenswrapper[4592]: I0929 18:08:40.182807 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:08:40 crc kubenswrapper[4592]: E0929 18:08:40.183104 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:08:40 crc kubenswrapper[4592]: I0929 18:08:40.704402 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ftrlk" podUID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerName="registry-server" containerID="cri-o://765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0" gracePeriod=2 Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.173547 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.264034 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e757d89d-6cdb-4837-bc76-dcb37a064250-utilities\") pod \"e757d89d-6cdb-4837-bc76-dcb37a064250\" (UID: \"e757d89d-6cdb-4837-bc76-dcb37a064250\") " Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.264488 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6p8m\" (UniqueName: \"kubernetes.io/projected/e757d89d-6cdb-4837-bc76-dcb37a064250-kube-api-access-c6p8m\") pod \"e757d89d-6cdb-4837-bc76-dcb37a064250\" (UID: \"e757d89d-6cdb-4837-bc76-dcb37a064250\") " Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.264515 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e757d89d-6cdb-4837-bc76-dcb37a064250-catalog-content\") pod \"e757d89d-6cdb-4837-bc76-dcb37a064250\" (UID: \"e757d89d-6cdb-4837-bc76-dcb37a064250\") " Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.267411 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e757d89d-6cdb-4837-bc76-dcb37a064250-utilities" (OuterVolumeSpecName: "utilities") pod "e757d89d-6cdb-4837-bc76-dcb37a064250" (UID: "e757d89d-6cdb-4837-bc76-dcb37a064250"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.278355 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e757d89d-6cdb-4837-bc76-dcb37a064250-kube-api-access-c6p8m" (OuterVolumeSpecName: "kube-api-access-c6p8m") pod "e757d89d-6cdb-4837-bc76-dcb37a064250" (UID: "e757d89d-6cdb-4837-bc76-dcb37a064250"). InnerVolumeSpecName "kube-api-access-c6p8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.341465 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e757d89d-6cdb-4837-bc76-dcb37a064250-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e757d89d-6cdb-4837-bc76-dcb37a064250" (UID: "e757d89d-6cdb-4837-bc76-dcb37a064250"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.366874 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6p8m\" (UniqueName: \"kubernetes.io/projected/e757d89d-6cdb-4837-bc76-dcb37a064250-kube-api-access-c6p8m\") on node \"crc\" DevicePath \"\"" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.366912 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e757d89d-6cdb-4837-bc76-dcb37a064250-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.366921 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e757d89d-6cdb-4837-bc76-dcb37a064250-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.714213 4592 generic.go:334] "Generic (PLEG): container finished" podID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerID="765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0" exitCode=0 Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.714257 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftrlk" event={"ID":"e757d89d-6cdb-4837-bc76-dcb37a064250","Type":"ContainerDied","Data":"765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0"} Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.714304 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ftrlk" event={"ID":"e757d89d-6cdb-4837-bc76-dcb37a064250","Type":"ContainerDied","Data":"8ab171f69c81877219db9938162001c1eb25048781b3ac4f26ff651d537542f9"} Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.714331 4592 scope.go:117] "RemoveContainer" containerID="765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.714475 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ftrlk" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.741185 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ftrlk"] Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.753390 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ftrlk"] Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.756325 4592 scope.go:117] "RemoveContainer" containerID="2426ca9a7d9669ae6c69ee30bfa6ded36f1007ac9f3b2019545b7e6e2ea8824c" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.810840 4592 scope.go:117] "RemoveContainer" containerID="96744393562deaf4a3cc0f16557fa5410e9c65551cf3e5b75f51e49aa3dc1b6c" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.834868 4592 scope.go:117] "RemoveContainer" containerID="765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0" Sep 29 18:08:41 crc kubenswrapper[4592]: E0929 18:08:41.835630 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0\": container with ID starting with 765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0 not found: ID does not exist" containerID="765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.835656 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0"} err="failed to get container status \"765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0\": rpc error: code = NotFound desc = could not find container \"765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0\": container with ID starting with 765ba1c7eb0dce528949034af56b76633f8d4d522754f09103d93fa0ab58f8a0 not found: ID does not exist" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.835676 4592 scope.go:117] "RemoveContainer" containerID="2426ca9a7d9669ae6c69ee30bfa6ded36f1007ac9f3b2019545b7e6e2ea8824c" Sep 29 18:08:41 crc kubenswrapper[4592]: E0929 18:08:41.836084 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2426ca9a7d9669ae6c69ee30bfa6ded36f1007ac9f3b2019545b7e6e2ea8824c\": container with ID starting with 2426ca9a7d9669ae6c69ee30bfa6ded36f1007ac9f3b2019545b7e6e2ea8824c not found: ID does not exist" containerID="2426ca9a7d9669ae6c69ee30bfa6ded36f1007ac9f3b2019545b7e6e2ea8824c" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.836139 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2426ca9a7d9669ae6c69ee30bfa6ded36f1007ac9f3b2019545b7e6e2ea8824c"} err="failed to get container status \"2426ca9a7d9669ae6c69ee30bfa6ded36f1007ac9f3b2019545b7e6e2ea8824c\": rpc error: code = NotFound desc = could not find container \"2426ca9a7d9669ae6c69ee30bfa6ded36f1007ac9f3b2019545b7e6e2ea8824c\": container with ID starting with 2426ca9a7d9669ae6c69ee30bfa6ded36f1007ac9f3b2019545b7e6e2ea8824c not found: ID does not exist" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.836165 4592 scope.go:117] "RemoveContainer" containerID="96744393562deaf4a3cc0f16557fa5410e9c65551cf3e5b75f51e49aa3dc1b6c" Sep 29 18:08:41 crc kubenswrapper[4592]: E0929 18:08:41.836468 4592 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"96744393562deaf4a3cc0f16557fa5410e9c65551cf3e5b75f51e49aa3dc1b6c\": container with ID starting with 96744393562deaf4a3cc0f16557fa5410e9c65551cf3e5b75f51e49aa3dc1b6c not found: ID does not exist" containerID="96744393562deaf4a3cc0f16557fa5410e9c65551cf3e5b75f51e49aa3dc1b6c" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.836496 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96744393562deaf4a3cc0f16557fa5410e9c65551cf3e5b75f51e49aa3dc1b6c"} err="failed to get container status \"96744393562deaf4a3cc0f16557fa5410e9c65551cf3e5b75f51e49aa3dc1b6c\": rpc error: code = NotFound desc = could not find container \"96744393562deaf4a3cc0f16557fa5410e9c65551cf3e5b75f51e49aa3dc1b6c\": container with ID starting with 96744393562deaf4a3cc0f16557fa5410e9c65551cf3e5b75f51e49aa3dc1b6c not found: ID does not exist" Sep 29 18:08:41 crc kubenswrapper[4592]: I0929 18:08:41.900977 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-m4z52_ff85be0b-4fe9-43fa-941f-c00f69b7f459/control-plane-machine-set-operator/0.log" Sep 29 18:08:42 crc kubenswrapper[4592]: I0929 18:08:42.077123 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-smrv8_92eb6f89-2332-47d7-a04c-19e63442c882/kube-rbac-proxy/0.log" Sep 29 18:08:42 crc kubenswrapper[4592]: I0929 18:08:42.130499 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-smrv8_92eb6f89-2332-47d7-a04c-19e63442c882/machine-api-operator/0.log" Sep 29 18:08:43 crc kubenswrapper[4592]: I0929 18:08:43.193351 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e757d89d-6cdb-4837-bc76-dcb37a064250" path="/var/lib/kubelet/pods/e757d89d-6cdb-4837-bc76-dcb37a064250/volumes" Sep 29 18:08:54 crc kubenswrapper[4592]: I0929 18:08:54.183534 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:08:54 crc kubenswrapper[4592]: E0929 18:08:54.184250 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:08:54 crc kubenswrapper[4592]: I0929 18:08:54.968380 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-wnhfl_0080b650-be9f-452d-8c10-69ae3480edf2/cert-manager-controller/0.log" Sep 29 18:08:55 crc kubenswrapper[4592]: I0929 18:08:55.090649 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-6phr8_26c87439-d01b-405b-9567-f2c2c83283e1/cert-manager-cainjector/0.log" Sep 29 18:08:55 crc kubenswrapper[4592]: I0929 18:08:55.215059 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-vdxkr_eb6c1a73-f740-4b79-ab2a-ccf80a36deb5/cert-manager-webhook/0.log" Sep 29 18:09:06 crc kubenswrapper[4592]: I0929 18:09:06.183088 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 
18:09:06 crc kubenswrapper[4592]: E0929 18:09:06.183878 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:09:07 crc kubenswrapper[4592]: I0929 18:09:07.273511 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-94s8k_f5019cbd-3156-4d20-9c40-163965b4ca0b/nmstate-console-plugin/0.log" Sep 29 18:09:07 crc kubenswrapper[4592]: I0929 18:09:07.327787 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-zgcpn_252fdf6a-56d5-473a-b492-e9b94bc89d19/nmstate-handler/0.log" Sep 29 18:09:07 crc kubenswrapper[4592]: I0929 18:09:07.477472 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-5lhdp_15e33da6-1266-4757-ab8b-bcbd435b8d26/kube-rbac-proxy/0.log" Sep 29 18:09:07 crc kubenswrapper[4592]: I0929 18:09:07.496282 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-5lhdp_15e33da6-1266-4757-ab8b-bcbd435b8d26/nmstate-metrics/0.log" Sep 29 18:09:07 crc kubenswrapper[4592]: I0929 18:09:07.523134 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-x6d4v_e9ff4f23-0699-427f-86b3-275b408c261a/nmstate-operator/0.log" Sep 29 18:09:07 crc kubenswrapper[4592]: I0929 18:09:07.656128 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-cfxmq_c9a4f2e3-e2ba-460b-92b2-a7cfda566c50/nmstate-webhook/0.log" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.048516 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jnq9g"] Sep 29 18:09:14 crc kubenswrapper[4592]: E0929 18:09:14.050670 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerName="extract-utilities" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.050691 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerName="extract-utilities" Sep 29 18:09:14 crc kubenswrapper[4592]: E0929 18:09:14.050708 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerName="registry-server" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.050716 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerName="registry-server" Sep 29 18:09:14 crc kubenswrapper[4592]: E0929 18:09:14.050727 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57d0bc9a-d88f-4502-9390-910f2e95564d" containerName="container-00" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.050735 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="57d0bc9a-d88f-4502-9390-910f2e95564d" containerName="container-00" Sep 29 18:09:14 crc kubenswrapper[4592]: E0929 18:09:14.050754 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerName="extract-content" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.050761 4592 
state_mem.go:107] "Deleted CPUSet assignment" podUID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerName="extract-content" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.051011 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="57d0bc9a-d88f-4502-9390-910f2e95564d" containerName="container-00" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.051031 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="e757d89d-6cdb-4837-bc76-dcb37a064250" containerName="registry-server" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.054593 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.070228 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jnq9g"] Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.177868 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd456293-1d61-4bb1-afb5-8364ac2a2811-catalog-content\") pod \"certified-operators-jnq9g\" (UID: \"bd456293-1d61-4bb1-afb5-8364ac2a2811\") " pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.178225 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd456293-1d61-4bb1-afb5-8364ac2a2811-utilities\") pod \"certified-operators-jnq9g\" (UID: \"bd456293-1d61-4bb1-afb5-8364ac2a2811\") " pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.178345 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-887xg\" (UniqueName: \"kubernetes.io/projected/bd456293-1d61-4bb1-afb5-8364ac2a2811-kube-api-access-887xg\") pod \"certified-operators-jnq9g\" (UID: \"bd456293-1d61-4bb1-afb5-8364ac2a2811\") " pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.279867 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd456293-1d61-4bb1-afb5-8364ac2a2811-utilities\") pod \"certified-operators-jnq9g\" (UID: \"bd456293-1d61-4bb1-afb5-8364ac2a2811\") " pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.280127 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-887xg\" (UniqueName: \"kubernetes.io/projected/bd456293-1d61-4bb1-afb5-8364ac2a2811-kube-api-access-887xg\") pod \"certified-operators-jnq9g\" (UID: \"bd456293-1d61-4bb1-afb5-8364ac2a2811\") " pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.280314 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd456293-1d61-4bb1-afb5-8364ac2a2811-catalog-content\") pod \"certified-operators-jnq9g\" (UID: \"bd456293-1d61-4bb1-afb5-8364ac2a2811\") " pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.280440 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/bd456293-1d61-4bb1-afb5-8364ac2a2811-utilities\") pod \"certified-operators-jnq9g\" (UID: \"bd456293-1d61-4bb1-afb5-8364ac2a2811\") " pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.280613 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd456293-1d61-4bb1-afb5-8364ac2a2811-catalog-content\") pod \"certified-operators-jnq9g\" (UID: \"bd456293-1d61-4bb1-afb5-8364ac2a2811\") " pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.303369 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-887xg\" (UniqueName: \"kubernetes.io/projected/bd456293-1d61-4bb1-afb5-8364ac2a2811-kube-api-access-887xg\") pod \"certified-operators-jnq9g\" (UID: \"bd456293-1d61-4bb1-afb5-8364ac2a2811\") " pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.390477 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.919484 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jnq9g"] Sep 29 18:09:14 crc kubenswrapper[4592]: I0929 18:09:14.985663 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnq9g" event={"ID":"bd456293-1d61-4bb1-afb5-8364ac2a2811","Type":"ContainerStarted","Data":"7e3025a813a5fc513f184cdcb51886ce697f79c0e4589f350f0a413774f09d07"} Sep 29 18:09:15 crc kubenswrapper[4592]: I0929 18:09:15.997855 4592 generic.go:334] "Generic (PLEG): container finished" podID="bd456293-1d61-4bb1-afb5-8364ac2a2811" containerID="eac77baaa3349bc5577d946a9e295f2ec3643ca4c9dfb6ee26f914f75112cf8f" exitCode=0 Sep 29 18:09:15 crc kubenswrapper[4592]: I0929 18:09:15.997909 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnq9g" event={"ID":"bd456293-1d61-4bb1-afb5-8364ac2a2811","Type":"ContainerDied","Data":"eac77baaa3349bc5577d946a9e295f2ec3643ca4c9dfb6ee26f914f75112cf8f"} Sep 29 18:09:18 crc kubenswrapper[4592]: I0929 18:09:18.017160 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnq9g" event={"ID":"bd456293-1d61-4bb1-afb5-8364ac2a2811","Type":"ContainerStarted","Data":"d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050"} Sep 29 18:09:18 crc kubenswrapper[4592]: E0929 18:09:18.751755 4592 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd456293_1d61_4bb1_afb5_8364ac2a2811.slice/crio-d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050.scope\": RecentStats: unable to find data in memory cache]" Sep 29 18:09:19 crc kubenswrapper[4592]: I0929 18:09:19.026251 4592 generic.go:334] "Generic (PLEG): container finished" podID="bd456293-1d61-4bb1-afb5-8364ac2a2811" containerID="d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050" exitCode=0 Sep 29 18:09:19 crc kubenswrapper[4592]: I0929 18:09:19.026296 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnq9g" 
event={"ID":"bd456293-1d61-4bb1-afb5-8364ac2a2811","Type":"ContainerDied","Data":"d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050"} Sep 29 18:09:20 crc kubenswrapper[4592]: I0929 18:09:20.038674 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnq9g" event={"ID":"bd456293-1d61-4bb1-afb5-8364ac2a2811","Type":"ContainerStarted","Data":"ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a"} Sep 29 18:09:20 crc kubenswrapper[4592]: I0929 18:09:20.059173 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jnq9g" podStartSLOduration=2.408700434 podStartE2EDuration="6.05913665s" podCreationTimestamp="2025-09-29 18:09:14 +0000 UTC" firstStartedPulling="2025-09-29 18:09:16.001476563 +0000 UTC m=+4686.149254244" lastFinishedPulling="2025-09-29 18:09:19.651912769 +0000 UTC m=+4689.799690460" observedRunningTime="2025-09-29 18:09:20.05913597 +0000 UTC m=+4690.206913671" watchObservedRunningTime="2025-09-29 18:09:20.05913665 +0000 UTC m=+4690.206914331" Sep 29 18:09:20 crc kubenswrapper[4592]: I0929 18:09:20.183047 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:09:20 crc kubenswrapper[4592]: E0929 18:09:20.183299 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:09:23 crc kubenswrapper[4592]: I0929 18:09:23.439057 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-jzgm6_560cbf05-ef23-4767-bd5c-eabd7f3eb864/kube-rbac-proxy/0.log" Sep 29 18:09:23 crc kubenswrapper[4592]: I0929 18:09:23.551582 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-jzgm6_560cbf05-ef23-4767-bd5c-eabd7f3eb864/controller/0.log" Sep 29 18:09:23 crc kubenswrapper[4592]: I0929 18:09:23.703390 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-frr-files/0.log" Sep 29 18:09:23 crc kubenswrapper[4592]: I0929 18:09:23.914470 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-reloader/0.log" Sep 29 18:09:23 crc kubenswrapper[4592]: I0929 18:09:23.915597 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-frr-files/0.log" Sep 29 18:09:23 crc kubenswrapper[4592]: I0929 18:09:23.949195 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g5sdt"] Sep 29 18:09:23 crc kubenswrapper[4592]: I0929 18:09:23.955648 4592 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:23 crc kubenswrapper[4592]: I0929 18:09:23.960393 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5sdt"] Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.000914 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-metrics/0.log" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.009694 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-reloader/0.log" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.072778 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-utilities\") pod \"redhat-marketplace-g5sdt\" (UID: \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\") " pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.073058 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-catalog-content\") pod \"redhat-marketplace-g5sdt\" (UID: \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\") " pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.073179 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8jjn\" (UniqueName: \"kubernetes.io/projected/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-kube-api-access-k8jjn\") pod \"redhat-marketplace-g5sdt\" (UID: \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\") " pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.174551 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8jjn\" (UniqueName: \"kubernetes.io/projected/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-kube-api-access-k8jjn\") pod \"redhat-marketplace-g5sdt\" (UID: \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\") " pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.174646 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-utilities\") pod \"redhat-marketplace-g5sdt\" (UID: \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\") " pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.174711 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-catalog-content\") pod \"redhat-marketplace-g5sdt\" (UID: \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\") " pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.175178 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-utilities\") pod \"redhat-marketplace-g5sdt\" (UID: \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\") " pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.175216 4592 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-catalog-content\") pod \"redhat-marketplace-g5sdt\" (UID: \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\") " pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.199389 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8jjn\" (UniqueName: \"kubernetes.io/projected/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-kube-api-access-k8jjn\") pod \"redhat-marketplace-g5sdt\" (UID: \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\") " pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.279212 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.370848 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-reloader/0.log" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.391215 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.391510 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.436283 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-frr-files/0.log" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.445944 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-metrics/0.log" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.467921 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-metrics/0.log" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.486522 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.675047 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-frr-files/0.log" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.705662 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-reloader/0.log" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.722430 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-metrics/0.log" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.744290 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/controller/0.log" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.753530 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5sdt"] Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.940241 4592 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/frr-metrics/0.log" Sep 29 18:09:24 crc kubenswrapper[4592]: I0929 18:09:24.976750 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/kube-rbac-proxy-frr/0.log" Sep 29 18:09:25 crc kubenswrapper[4592]: I0929 18:09:25.037314 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/kube-rbac-proxy/0.log" Sep 29 18:09:25 crc kubenswrapper[4592]: I0929 18:09:25.080217 4592 generic.go:334] "Generic (PLEG): container finished" podID="9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" containerID="517549be0abb16b96d39ad76770245e88fbaf03a697fd2c34bc51c7d9ffb8e83" exitCode=0 Sep 29 18:09:25 crc kubenswrapper[4592]: I0929 18:09:25.081798 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5sdt" event={"ID":"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435","Type":"ContainerDied","Data":"517549be0abb16b96d39ad76770245e88fbaf03a697fd2c34bc51c7d9ffb8e83"} Sep 29 18:09:25 crc kubenswrapper[4592]: I0929 18:09:25.081831 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5sdt" event={"ID":"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435","Type":"ContainerStarted","Data":"370a410803c4051c8632191faafa3081f200c83d2a79800e426ba4ab53c022e4"} Sep 29 18:09:25 crc kubenswrapper[4592]: I0929 18:09:25.140693 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:25 crc kubenswrapper[4592]: I0929 18:09:25.203089 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/reloader/0.log" Sep 29 18:09:25 crc kubenswrapper[4592]: I0929 18:09:25.333948 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-lrvw2_2c5260c1-19c0-4d8b-b659-5a09f3a887da/frr-k8s-webhook-server/0.log" Sep 29 18:09:25 crc kubenswrapper[4592]: I0929 18:09:25.604588 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5b64bfcc84-9fvrf_764035e6-f447-4e19-a17f-c334e6270ba6/manager/0.log" Sep 29 18:09:25 crc kubenswrapper[4592]: I0929 18:09:25.777992 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5c7645bc9b-vdzjb_7290d1e2-eecb-4663-8c34-66c35acc0726/webhook-server/0.log" Sep 29 18:09:25 crc kubenswrapper[4592]: I0929 18:09:25.973911 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-z794s_8634039b-db7c-46c2-a140-a746270aa768/kube-rbac-proxy/0.log" Sep 29 18:09:26 crc kubenswrapper[4592]: I0929 18:09:26.431830 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/frr/0.log" Sep 29 18:09:26 crc kubenswrapper[4592]: I0929 18:09:26.508998 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-z794s_8634039b-db7c-46c2-a140-a746270aa768/speaker/0.log" Sep 29 18:09:26 crc kubenswrapper[4592]: I0929 18:09:26.731280 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jnq9g"] Sep 29 18:09:27 crc kubenswrapper[4592]: I0929 18:09:27.101164 4592 generic.go:334] "Generic (PLEG): container finished" podID="9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" 
containerID="b7a9de5b291399edde11e6c0354bbef9c6cd50a734c4d19b5f9578583c4534d0" exitCode=0 Sep 29 18:09:27 crc kubenswrapper[4592]: I0929 18:09:27.101348 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jnq9g" podUID="bd456293-1d61-4bb1-afb5-8364ac2a2811" containerName="registry-server" containerID="cri-o://ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a" gracePeriod=2 Sep 29 18:09:27 crc kubenswrapper[4592]: I0929 18:09:27.102382 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5sdt" event={"ID":"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435","Type":"ContainerDied","Data":"b7a9de5b291399edde11e6c0354bbef9c6cd50a734c4d19b5f9578583c4534d0"} Sep 29 18:09:27 crc kubenswrapper[4592]: I0929 18:09:27.980123 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.070277 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-887xg\" (UniqueName: \"kubernetes.io/projected/bd456293-1d61-4bb1-afb5-8364ac2a2811-kube-api-access-887xg\") pod \"bd456293-1d61-4bb1-afb5-8364ac2a2811\" (UID: \"bd456293-1d61-4bb1-afb5-8364ac2a2811\") " Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.070462 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd456293-1d61-4bb1-afb5-8364ac2a2811-catalog-content\") pod \"bd456293-1d61-4bb1-afb5-8364ac2a2811\" (UID: \"bd456293-1d61-4bb1-afb5-8364ac2a2811\") " Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.070625 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd456293-1d61-4bb1-afb5-8364ac2a2811-utilities\") pod \"bd456293-1d61-4bb1-afb5-8364ac2a2811\" (UID: \"bd456293-1d61-4bb1-afb5-8364ac2a2811\") " Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.071480 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd456293-1d61-4bb1-afb5-8364ac2a2811-utilities" (OuterVolumeSpecName: "utilities") pod "bd456293-1d61-4bb1-afb5-8364ac2a2811" (UID: "bd456293-1d61-4bb1-afb5-8364ac2a2811"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.102949 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd456293-1d61-4bb1-afb5-8364ac2a2811-kube-api-access-887xg" (OuterVolumeSpecName: "kube-api-access-887xg") pod "bd456293-1d61-4bb1-afb5-8364ac2a2811" (UID: "bd456293-1d61-4bb1-afb5-8364ac2a2811"). InnerVolumeSpecName "kube-api-access-887xg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.128445 4592 generic.go:334] "Generic (PLEG): container finished" podID="bd456293-1d61-4bb1-afb5-8364ac2a2811" containerID="ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a" exitCode=0 Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.128490 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnq9g" event={"ID":"bd456293-1d61-4bb1-afb5-8364ac2a2811","Type":"ContainerDied","Data":"ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a"} Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.128514 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnq9g" event={"ID":"bd456293-1d61-4bb1-afb5-8364ac2a2811","Type":"ContainerDied","Data":"7e3025a813a5fc513f184cdcb51886ce697f79c0e4589f350f0a413774f09d07"} Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.128529 4592 scope.go:117] "RemoveContainer" containerID="ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.128601 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jnq9g" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.138009 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd456293-1d61-4bb1-afb5-8364ac2a2811-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd456293-1d61-4bb1-afb5-8364ac2a2811" (UID: "bd456293-1d61-4bb1-afb5-8364ac2a2811"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.172752 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-887xg\" (UniqueName: \"kubernetes.io/projected/bd456293-1d61-4bb1-afb5-8364ac2a2811-kube-api-access-887xg\") on node \"crc\" DevicePath \"\"" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.172935 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd456293-1d61-4bb1-afb5-8364ac2a2811-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.172990 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd456293-1d61-4bb1-afb5-8364ac2a2811-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.178672 4592 scope.go:117] "RemoveContainer" containerID="d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.203533 4592 scope.go:117] "RemoveContainer" containerID="eac77baaa3349bc5577d946a9e295f2ec3643ca4c9dfb6ee26f914f75112cf8f" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.360434 4592 scope.go:117] "RemoveContainer" containerID="ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a" Sep 29 18:09:28 crc kubenswrapper[4592]: E0929 18:09:28.360909 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a\": container with ID starting with ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a not found: ID does not exist" 
containerID="ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.361032 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a"} err="failed to get container status \"ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a\": rpc error: code = NotFound desc = could not find container \"ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a\": container with ID starting with ebd694c7019427bbb8ec8796711bba883778ae7ecaa8976f81959c6df03a2d6a not found: ID does not exist" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.361123 4592 scope.go:117] "RemoveContainer" containerID="d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050" Sep 29 18:09:28 crc kubenswrapper[4592]: E0929 18:09:28.361532 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050\": container with ID starting with d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050 not found: ID does not exist" containerID="d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.361626 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050"} err="failed to get container status \"d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050\": rpc error: code = NotFound desc = could not find container \"d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050\": container with ID starting with d37ac2e9a4900e7b95da03553b06cb95f64084adeda38db7b861b1126f6d9050 not found: ID does not exist" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.361689 4592 scope.go:117] "RemoveContainer" containerID="eac77baaa3349bc5577d946a9e295f2ec3643ca4c9dfb6ee26f914f75112cf8f" Sep 29 18:09:28 crc kubenswrapper[4592]: E0929 18:09:28.362113 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eac77baaa3349bc5577d946a9e295f2ec3643ca4c9dfb6ee26f914f75112cf8f\": container with ID starting with eac77baaa3349bc5577d946a9e295f2ec3643ca4c9dfb6ee26f914f75112cf8f not found: ID does not exist" containerID="eac77baaa3349bc5577d946a9e295f2ec3643ca4c9dfb6ee26f914f75112cf8f" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.362224 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eac77baaa3349bc5577d946a9e295f2ec3643ca4c9dfb6ee26f914f75112cf8f"} err="failed to get container status \"eac77baaa3349bc5577d946a9e295f2ec3643ca4c9dfb6ee26f914f75112cf8f\": rpc error: code = NotFound desc = could not find container \"eac77baaa3349bc5577d946a9e295f2ec3643ca4c9dfb6ee26f914f75112cf8f\": container with ID starting with eac77baaa3349bc5577d946a9e295f2ec3643ca4c9dfb6ee26f914f75112cf8f not found: ID does not exist" Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.458613 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jnq9g"] Sep 29 18:09:28 crc kubenswrapper[4592]: I0929 18:09:28.472254 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jnq9g"] Sep 29 18:09:29 crc kubenswrapper[4592]: I0929 18:09:29.141298 
4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5sdt" event={"ID":"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435","Type":"ContainerStarted","Data":"b7d95da594011ca2ff16894e8b5b12275a19e8637d3cf2aab4f8eafaaa24c0e0"} Sep 29 18:09:29 crc kubenswrapper[4592]: I0929 18:09:29.163420 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g5sdt" podStartSLOduration=3.314748801 podStartE2EDuration="6.16340243s" podCreationTimestamp="2025-09-29 18:09:23 +0000 UTC" firstStartedPulling="2025-09-29 18:09:25.082325513 +0000 UTC m=+4695.230103194" lastFinishedPulling="2025-09-29 18:09:27.930979142 +0000 UTC m=+4698.078756823" observedRunningTime="2025-09-29 18:09:29.15568546 +0000 UTC m=+4699.303463141" watchObservedRunningTime="2025-09-29 18:09:29.16340243 +0000 UTC m=+4699.311180111" Sep 29 18:09:29 crc kubenswrapper[4592]: I0929 18:09:29.195934 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd456293-1d61-4bb1-afb5-8364ac2a2811" path="/var/lib/kubelet/pods/bd456293-1d61-4bb1-afb5-8364ac2a2811/volumes" Sep 29 18:09:34 crc kubenswrapper[4592]: I0929 18:09:34.184989 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:09:34 crc kubenswrapper[4592]: E0929 18:09:34.186756 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:09:34 crc kubenswrapper[4592]: I0929 18:09:34.279908 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:34 crc kubenswrapper[4592]: I0929 18:09:34.279965 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:34 crc kubenswrapper[4592]: I0929 18:09:34.343028 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:35 crc kubenswrapper[4592]: I0929 18:09:35.254390 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:35 crc kubenswrapper[4592]: I0929 18:09:35.310804 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5sdt"] Sep 29 18:09:37 crc kubenswrapper[4592]: I0929 18:09:37.219965 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g5sdt" podUID="9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" containerName="registry-server" containerID="cri-o://b7d95da594011ca2ff16894e8b5b12275a19e8637d3cf2aab4f8eafaaa24c0e0" gracePeriod=2 Sep 29 18:09:38 crc kubenswrapper[4592]: I0929 18:09:38.292627 4592 generic.go:334] "Generic (PLEG): container finished" podID="9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" containerID="b7d95da594011ca2ff16894e8b5b12275a19e8637d3cf2aab4f8eafaaa24c0e0" exitCode=0 Sep 29 18:09:38 crc kubenswrapper[4592]: I0929 18:09:38.292696 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5sdt" 
event={"ID":"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435","Type":"ContainerDied","Data":"b7d95da594011ca2ff16894e8b5b12275a19e8637d3cf2aab4f8eafaaa24c0e0"} Sep 29 18:09:38 crc kubenswrapper[4592]: I0929 18:09:38.414926 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:38 crc kubenswrapper[4592]: I0929 18:09:38.474722 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8jjn\" (UniqueName: \"kubernetes.io/projected/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-kube-api-access-k8jjn\") pod \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\" (UID: \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\") " Sep 29 18:09:38 crc kubenswrapper[4592]: I0929 18:09:38.476543 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-catalog-content\") pod \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\" (UID: \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\") " Sep 29 18:09:38 crc kubenswrapper[4592]: I0929 18:09:38.476758 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-utilities\") pod \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\" (UID: \"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435\") " Sep 29 18:09:38 crc kubenswrapper[4592]: I0929 18:09:38.481593 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-utilities" (OuterVolumeSpecName: "utilities") pod "9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" (UID: "9858aa46-2b6d-4aee-b6e2-dd53a8d9f435"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:09:38 crc kubenswrapper[4592]: I0929 18:09:38.506008 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-kube-api-access-k8jjn" (OuterVolumeSpecName: "kube-api-access-k8jjn") pod "9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" (UID: "9858aa46-2b6d-4aee-b6e2-dd53a8d9f435"). InnerVolumeSpecName "kube-api-access-k8jjn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:09:38 crc kubenswrapper[4592]: I0929 18:09:38.519508 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" (UID: "9858aa46-2b6d-4aee-b6e2-dd53a8d9f435"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:09:38 crc kubenswrapper[4592]: I0929 18:09:38.580495 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:09:38 crc kubenswrapper[4592]: I0929 18:09:38.580528 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:09:38 crc kubenswrapper[4592]: I0929 18:09:38.580537 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8jjn\" (UniqueName: \"kubernetes.io/projected/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435-kube-api-access-k8jjn\") on node \"crc\" DevicePath \"\"" Sep 29 18:09:39 crc kubenswrapper[4592]: I0929 18:09:39.302565 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5sdt" event={"ID":"9858aa46-2b6d-4aee-b6e2-dd53a8d9f435","Type":"ContainerDied","Data":"370a410803c4051c8632191faafa3081f200c83d2a79800e426ba4ab53c022e4"} Sep 29 18:09:39 crc kubenswrapper[4592]: I0929 18:09:39.302909 4592 scope.go:117] "RemoveContainer" containerID="b7d95da594011ca2ff16894e8b5b12275a19e8637d3cf2aab4f8eafaaa24c0e0" Sep 29 18:09:39 crc kubenswrapper[4592]: I0929 18:09:39.302738 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5sdt" Sep 29 18:09:39 crc kubenswrapper[4592]: I0929 18:09:39.324183 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5sdt"] Sep 29 18:09:39 crc kubenswrapper[4592]: I0929 18:09:39.326384 4592 scope.go:117] "RemoveContainer" containerID="b7a9de5b291399edde11e6c0354bbef9c6cd50a734c4d19b5f9578583c4534d0" Sep 29 18:09:39 crc kubenswrapper[4592]: I0929 18:09:39.340362 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5sdt"] Sep 29 18:09:39 crc kubenswrapper[4592]: I0929 18:09:39.345313 4592 scope.go:117] "RemoveContainer" containerID="517549be0abb16b96d39ad76770245e88fbaf03a697fd2c34bc51c7d9ffb8e83" Sep 29 18:09:39 crc kubenswrapper[4592]: I0929 18:09:39.780916 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/util/0.log" Sep 29 18:09:39 crc kubenswrapper[4592]: I0929 18:09:39.965377 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/pull/0.log" Sep 29 18:09:39 crc kubenswrapper[4592]: I0929 18:09:39.981330 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/util/0.log" Sep 29 18:09:39 crc kubenswrapper[4592]: I0929 18:09:39.994782 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/pull/0.log" Sep 29 18:09:40 crc kubenswrapper[4592]: I0929 18:09:40.194029 4592 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/extract/0.log" Sep 29 18:09:40 crc kubenswrapper[4592]: I0929 18:09:40.203373 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/util/0.log" Sep 29 18:09:40 crc kubenswrapper[4592]: I0929 18:09:40.204087 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/pull/0.log" Sep 29 18:09:40 crc kubenswrapper[4592]: I0929 18:09:40.396068 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/extract-utilities/0.log" Sep 29 18:09:40 crc kubenswrapper[4592]: I0929 18:09:40.592299 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/extract-content/0.log" Sep 29 18:09:40 crc kubenswrapper[4592]: I0929 18:09:40.596952 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/extract-content/0.log" Sep 29 18:09:40 crc kubenswrapper[4592]: I0929 18:09:40.598404 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/extract-utilities/0.log" Sep 29 18:09:40 crc kubenswrapper[4592]: I0929 18:09:40.760377 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/extract-utilities/0.log" Sep 29 18:09:40 crc kubenswrapper[4592]: I0929 18:09:40.899406 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/extract-content/0.log" Sep 29 18:09:41 crc kubenswrapper[4592]: I0929 18:09:41.053039 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/extract-utilities/0.log" Sep 29 18:09:41 crc kubenswrapper[4592]: I0929 18:09:41.195497 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" path="/var/lib/kubelet/pods/9858aa46-2b6d-4aee-b6e2-dd53a8d9f435/volumes" Sep 29 18:09:41 crc kubenswrapper[4592]: I0929 18:09:41.206651 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/registry-server/0.log" Sep 29 18:09:41 crc kubenswrapper[4592]: I0929 18:09:41.263829 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/extract-utilities/0.log" Sep 29 18:09:41 crc kubenswrapper[4592]: I0929 18:09:41.324265 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/extract-content/0.log" Sep 29 18:09:41 crc kubenswrapper[4592]: I0929 18:09:41.360105 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/extract-content/0.log" Sep 29 18:09:41 crc 
Sep 29 18:09:41 crc kubenswrapper[4592]: I0929 18:09:41.670743 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/extract-content/0.log"
Sep 29 18:09:41 crc kubenswrapper[4592]: I0929 18:09:41.916459 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/util/0.log"
Sep 29 18:09:42 crc kubenswrapper[4592]: I0929 18:09:42.134637 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/pull/0.log"
Sep 29 18:09:42 crc kubenswrapper[4592]: I0929 18:09:42.210458 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/util/0.log"
Sep 29 18:09:42 crc kubenswrapper[4592]: I0929 18:09:42.261847 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/pull/0.log"
Sep 29 18:09:42 crc kubenswrapper[4592]: I0929 18:09:42.287915 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/registry-server/0.log"
Sep 29 18:09:42 crc kubenswrapper[4592]: I0929 18:09:42.458627 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/util/0.log"
Sep 29 18:09:42 crc kubenswrapper[4592]: I0929 18:09:42.531868 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/pull/0.log"
Sep 29 18:09:42 crc kubenswrapper[4592]: I0929 18:09:42.540703 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/extract/0.log"
Sep 29 18:09:42 crc kubenswrapper[4592]: I0929 18:09:42.726831 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-hq85k_d9110599-7f42-4970-93fa-89f37c84fad3/marketplace-operator/0.log"
Sep 29 18:09:42 crc kubenswrapper[4592]: I0929 18:09:42.782426 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/extract-utilities/0.log"
Sep 29 18:09:42 crc kubenswrapper[4592]: I0929 18:09:42.983337 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/extract-utilities/0.log"
Sep 29 18:09:42 crc kubenswrapper[4592]: I0929 18:09:42.990525 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/extract-content/0.log"
Sep 29 18:09:43 crc kubenswrapper[4592]: I0929 18:09:43.030558 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/extract-content/0.log"
Sep 29 18:09:43 crc kubenswrapper[4592]: I0929 18:09:43.203319 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/extract-content/0.log"
Sep 29 18:09:43 crc kubenswrapper[4592]: I0929 18:09:43.248461 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/extract-utilities/0.log"
Sep 29 18:09:43 crc kubenswrapper[4592]: I0929 18:09:43.435837 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/registry-server/0.log"
Sep 29 18:09:43 crc kubenswrapper[4592]: I0929 18:09:43.498990 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/extract-utilities/0.log"
Sep 29 18:09:43 crc kubenswrapper[4592]: I0929 18:09:43.689446 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/extract-content/0.log"
Sep 29 18:09:43 crc kubenswrapper[4592]: I0929 18:09:43.690895 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/extract-utilities/0.log"
Sep 29 18:09:43 crc kubenswrapper[4592]: I0929 18:09:43.714889 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/extract-content/0.log"
Sep 29 18:09:43 crc kubenswrapper[4592]: I0929 18:09:43.887526 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/extract-utilities/0.log"
Sep 29 18:09:43 crc kubenswrapper[4592]: I0929 18:09:43.992798 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/extract-content/0.log"
Sep 29 18:09:44 crc kubenswrapper[4592]: I0929 18:09:44.489247 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/registry-server/0.log"
Sep 29 18:09:46 crc kubenswrapper[4592]: I0929 18:09:46.190547 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362"
Sep 29 18:09:46 crc kubenswrapper[4592]: E0929 18:09:46.195097 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 18:09:58 crc kubenswrapper[4592]: I0929 18:09:58.182949 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362"
Sep 29 18:09:58 crc kubenswrapper[4592]: E0929 18:09:58.183910 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 18:10:11 crc kubenswrapper[4592]: I0929 18:10:11.200613 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362"
Sep 29 18:10:11 crc kubenswrapper[4592]: E0929 18:10:11.201430 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 18:10:26 crc kubenswrapper[4592]: I0929 18:10:26.183827 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362"
Sep 29 18:10:26 crc kubenswrapper[4592]: E0929 18:10:26.184693 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 18:10:38 crc kubenswrapper[4592]: I0929 18:10:38.183049 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362"
Sep 29 18:10:38 crc kubenswrapper[4592]: E0929 18:10:38.185277 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 18:10:51 crc kubenswrapper[4592]: I0929 18:10:51.195721 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362"
Sep 29 18:10:51 crc kubenswrapper[4592]: E0929 18:10:51.196429 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
Sep 29 18:11:04 crc kubenswrapper[4592]: I0929 18:11:04.182954 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362"
Sep 29 18:11:04 crc kubenswrapper[4592]: E0929 18:11:04.183610 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035"
pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:11:15 crc kubenswrapper[4592]: I0929 18:11:15.184615 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:11:15 crc kubenswrapper[4592]: E0929 18:11:15.185822 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:11:28 crc kubenswrapper[4592]: I0929 18:11:28.183568 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:11:28 crc kubenswrapper[4592]: E0929 18:11:28.184446 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:11:43 crc kubenswrapper[4592]: I0929 18:11:43.183020 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:11:43 crc kubenswrapper[4592]: E0929 18:11:43.184086 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:11:54 crc kubenswrapper[4592]: I0929 18:11:54.183536 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:11:54 crc kubenswrapper[4592]: E0929 18:11:54.184716 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:12:05 crc kubenswrapper[4592]: I0929 18:12:05.192324 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:12:05 crc kubenswrapper[4592]: E0929 18:12:05.193414 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:12:17 crc kubenswrapper[4592]: I0929 18:12:17.187299 4592 
scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362" Sep 29 18:12:17 crc kubenswrapper[4592]: E0929 18:12:17.188238 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:12:18 crc kubenswrapper[4592]: I0929 18:12:18.972134 4592 generic.go:334] "Generic (PLEG): container finished" podID="19733981-aad9-4fa7-8bc0-7e6255fe82b3" containerID="0b22f948a632460f81e62014727517adac7ddcd8d94675b8488bbc04ab1d5d1f" exitCode=0 Sep 29 18:12:18 crc kubenswrapper[4592]: I0929 18:12:18.972228 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-c56gt/must-gather-gc2nb" event={"ID":"19733981-aad9-4fa7-8bc0-7e6255fe82b3","Type":"ContainerDied","Data":"0b22f948a632460f81e62014727517adac7ddcd8d94675b8488bbc04ab1d5d1f"} Sep 29 18:12:18 crc kubenswrapper[4592]: I0929 18:12:18.973123 4592 scope.go:117] "RemoveContainer" containerID="0b22f948a632460f81e62014727517adac7ddcd8d94675b8488bbc04ab1d5d1f" Sep 29 18:12:19 crc kubenswrapper[4592]: I0929 18:12:19.056557 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-c56gt_must-gather-gc2nb_19733981-aad9-4fa7-8bc0-7e6255fe82b3/gather/0.log" Sep 29 18:12:23 crc kubenswrapper[4592]: I0929 18:12:23.369642 4592 scope.go:117] "RemoveContainer" containerID="85d18ffe56f2c937e82d0186556ae8d2c638e2036a7b8b97452ae5caf4cae13a" Sep 29 18:12:28 crc kubenswrapper[4592]: I0929 18:12:28.542556 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-c56gt/must-gather-gc2nb"] Sep 29 18:12:28 crc kubenswrapper[4592]: I0929 18:12:28.543775 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-c56gt/must-gather-gc2nb" podUID="19733981-aad9-4fa7-8bc0-7e6255fe82b3" containerName="copy" containerID="cri-o://56417f48da327c6c13a04301662db8cd1ef6fd9c92bbd82830d1a35bfab1f2e9" gracePeriod=2 Sep 29 18:12:28 crc kubenswrapper[4592]: I0929 18:12:28.553956 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-c56gt/must-gather-gc2nb"] Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.034340 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-c56gt_must-gather-gc2nb_19733981-aad9-4fa7-8bc0-7e6255fe82b3/copy/0.log" Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.034984 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-c56gt/must-gather-gc2nb" Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.087626 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-c56gt_must-gather-gc2nb_19733981-aad9-4fa7-8bc0-7e6255fe82b3/copy/0.log" Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.088263 4592 generic.go:334] "Generic (PLEG): container finished" podID="19733981-aad9-4fa7-8bc0-7e6255fe82b3" containerID="56417f48da327c6c13a04301662db8cd1ef6fd9c92bbd82830d1a35bfab1f2e9" exitCode=143 Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.088393 4592 scope.go:117] "RemoveContainer" containerID="56417f48da327c6c13a04301662db8cd1ef6fd9c92bbd82830d1a35bfab1f2e9" Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.088566 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-c56gt/must-gather-gc2nb" Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.110405 4592 scope.go:117] "RemoveContainer" containerID="0b22f948a632460f81e62014727517adac7ddcd8d94675b8488bbc04ab1d5d1f" Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.149881 4592 scope.go:117] "RemoveContainer" containerID="56417f48da327c6c13a04301662db8cd1ef6fd9c92bbd82830d1a35bfab1f2e9" Sep 29 18:12:29 crc kubenswrapper[4592]: E0929 18:12:29.150770 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56417f48da327c6c13a04301662db8cd1ef6fd9c92bbd82830d1a35bfab1f2e9\": container with ID starting with 56417f48da327c6c13a04301662db8cd1ef6fd9c92bbd82830d1a35bfab1f2e9 not found: ID does not exist" containerID="56417f48da327c6c13a04301662db8cd1ef6fd9c92bbd82830d1a35bfab1f2e9" Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.150813 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56417f48da327c6c13a04301662db8cd1ef6fd9c92bbd82830d1a35bfab1f2e9"} err="failed to get container status \"56417f48da327c6c13a04301662db8cd1ef6fd9c92bbd82830d1a35bfab1f2e9\": rpc error: code = NotFound desc = could not find container \"56417f48da327c6c13a04301662db8cd1ef6fd9c92bbd82830d1a35bfab1f2e9\": container with ID starting with 56417f48da327c6c13a04301662db8cd1ef6fd9c92bbd82830d1a35bfab1f2e9 not found: ID does not exist" Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.150840 4592 scope.go:117] "RemoveContainer" containerID="0b22f948a632460f81e62014727517adac7ddcd8d94675b8488bbc04ab1d5d1f" Sep 29 18:12:29 crc kubenswrapper[4592]: E0929 18:12:29.151140 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b22f948a632460f81e62014727517adac7ddcd8d94675b8488bbc04ab1d5d1f\": container with ID starting with 0b22f948a632460f81e62014727517adac7ddcd8d94675b8488bbc04ab1d5d1f not found: ID does not exist" containerID="0b22f948a632460f81e62014727517adac7ddcd8d94675b8488bbc04ab1d5d1f" Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.151193 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b22f948a632460f81e62014727517adac7ddcd8d94675b8488bbc04ab1d5d1f"} err="failed to get container status \"0b22f948a632460f81e62014727517adac7ddcd8d94675b8488bbc04ab1d5d1f\": rpc error: code = NotFound desc = could not find container \"0b22f948a632460f81e62014727517adac7ddcd8d94675b8488bbc04ab1d5d1f\": container with ID starting with 
Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.162886 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j76wb\" (UniqueName: \"kubernetes.io/projected/19733981-aad9-4fa7-8bc0-7e6255fe82b3-kube-api-access-j76wb\") pod \"19733981-aad9-4fa7-8bc0-7e6255fe82b3\" (UID: \"19733981-aad9-4fa7-8bc0-7e6255fe82b3\") "
Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.163264 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/19733981-aad9-4fa7-8bc0-7e6255fe82b3-must-gather-output\") pod \"19733981-aad9-4fa7-8bc0-7e6255fe82b3\" (UID: \"19733981-aad9-4fa7-8bc0-7e6255fe82b3\") "
Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.171511 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19733981-aad9-4fa7-8bc0-7e6255fe82b3-kube-api-access-j76wb" (OuterVolumeSpecName: "kube-api-access-j76wb") pod "19733981-aad9-4fa7-8bc0-7e6255fe82b3" (UID: "19733981-aad9-4fa7-8bc0-7e6255fe82b3"). InnerVolumeSpecName "kube-api-access-j76wb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.266131 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j76wb\" (UniqueName: \"kubernetes.io/projected/19733981-aad9-4fa7-8bc0-7e6255fe82b3-kube-api-access-j76wb\") on node \"crc\" DevicePath \"\""
Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.342135 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19733981-aad9-4fa7-8bc0-7e6255fe82b3-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "19733981-aad9-4fa7-8bc0-7e6255fe82b3" (UID: "19733981-aad9-4fa7-8bc0-7e6255fe82b3"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 18:12:29 crc kubenswrapper[4592]: I0929 18:12:29.371843 4592 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/19733981-aad9-4fa7-8bc0-7e6255fe82b3-must-gather-output\") on node \"crc\" DevicePath \"\""
Sep 29 18:12:31 crc kubenswrapper[4592]: I0929 18:12:31.191608 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362"
Sep 29 18:12:31 crc kubenswrapper[4592]: I0929 18:12:31.194448 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19733981-aad9-4fa7-8bc0-7e6255fe82b3" path="/var/lib/kubelet/pods/19733981-aad9-4fa7-8bc0-7e6255fe82b3/volumes"
Sep 29 18:12:32 crc kubenswrapper[4592]: I0929 18:12:32.129883 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"fb692c262e198dd5e7af83b00f4158ad42d9ceb33907aecb9725919fbdbcfca9"}
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.116070 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mjks7/must-gather-dsd9f"]
Sep 29 18:13:05 crc kubenswrapper[4592]: E0929 18:13:05.116984 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19733981-aad9-4fa7-8bc0-7e6255fe82b3" containerName="copy"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.116998 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="19733981-aad9-4fa7-8bc0-7e6255fe82b3" containerName="copy"
Sep 29 18:13:05 crc kubenswrapper[4592]: E0929 18:13:05.117014 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd456293-1d61-4bb1-afb5-8364ac2a2811" containerName="extract-content"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.117019 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd456293-1d61-4bb1-afb5-8364ac2a2811" containerName="extract-content"
Sep 29 18:13:05 crc kubenswrapper[4592]: E0929 18:13:05.117033 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" containerName="extract-utilities"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.117039 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" containerName="extract-utilities"
Sep 29 18:13:05 crc kubenswrapper[4592]: E0929 18:13:05.117053 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd456293-1d61-4bb1-afb5-8364ac2a2811" containerName="registry-server"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.117059 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd456293-1d61-4bb1-afb5-8364ac2a2811" containerName="registry-server"
Sep 29 18:13:05 crc kubenswrapper[4592]: E0929 18:13:05.117068 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" containerName="registry-server"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.117073 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" containerName="registry-server"
Sep 29 18:13:05 crc kubenswrapper[4592]: E0929 18:13:05.117086 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd456293-1d61-4bb1-afb5-8364ac2a2811" containerName="extract-utilities"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.117091 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd456293-1d61-4bb1-afb5-8364ac2a2811" containerName="extract-utilities"
Sep 29 18:13:05 crc kubenswrapper[4592]: E0929 18:13:05.117098 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19733981-aad9-4fa7-8bc0-7e6255fe82b3" containerName="gather"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.117104 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="19733981-aad9-4fa7-8bc0-7e6255fe82b3" containerName="gather"
Sep 29 18:13:05 crc kubenswrapper[4592]: E0929 18:13:05.117115 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" containerName="extract-content"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.117121 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" containerName="extract-content"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.117334 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="9858aa46-2b6d-4aee-b6e2-dd53a8d9f435" containerName="registry-server"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.117356 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd456293-1d61-4bb1-afb5-8364ac2a2811" containerName="registry-server"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.117368 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="19733981-aad9-4fa7-8bc0-7e6255fe82b3" containerName="copy"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.117376 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="19733981-aad9-4fa7-8bc0-7e6255fe82b3" containerName="gather"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.118297 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mjks7/must-gather-dsd9f"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.121646 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-mjks7"/"default-dockercfg-f29l6"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.121784 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mjks7"/"openshift-service-ca.crt"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.122782 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-mjks7"/"kube-root-ca.crt"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.133610 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mjks7/must-gather-dsd9f"]
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.169850 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f8ef9825-c099-4d2c-9241-9eef9d20e90f-must-gather-output\") pod \"must-gather-dsd9f\" (UID: \"f8ef9825-c099-4d2c-9241-9eef9d20e90f\") " pod="openshift-must-gather-mjks7/must-gather-dsd9f"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.170019 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7nj9\" (UniqueName: \"kubernetes.io/projected/f8ef9825-c099-4d2c-9241-9eef9d20e90f-kube-api-access-d7nj9\") pod \"must-gather-dsd9f\" (UID: \"f8ef9825-c099-4d2c-9241-9eef9d20e90f\") " pod="openshift-must-gather-mjks7/must-gather-dsd9f"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.271364 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7nj9\" (UniqueName: \"kubernetes.io/projected/f8ef9825-c099-4d2c-9241-9eef9d20e90f-kube-api-access-d7nj9\") pod \"must-gather-dsd9f\" (UID: \"f8ef9825-c099-4d2c-9241-9eef9d20e90f\") " pod="openshift-must-gather-mjks7/must-gather-dsd9f"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.271502 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f8ef9825-c099-4d2c-9241-9eef9d20e90f-must-gather-output\") pod \"must-gather-dsd9f\" (UID: \"f8ef9825-c099-4d2c-9241-9eef9d20e90f\") " pod="openshift-must-gather-mjks7/must-gather-dsd9f"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.271920 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f8ef9825-c099-4d2c-9241-9eef9d20e90f-must-gather-output\") pod \"must-gather-dsd9f\" (UID: \"f8ef9825-c099-4d2c-9241-9eef9d20e90f\") " pod="openshift-must-gather-mjks7/must-gather-dsd9f"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.291162 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7nj9\" (UniqueName: \"kubernetes.io/projected/f8ef9825-c099-4d2c-9241-9eef9d20e90f-kube-api-access-d7nj9\") pod \"must-gather-dsd9f\" (UID: \"f8ef9825-c099-4d2c-9241-9eef9d20e90f\") " pod="openshift-must-gather-mjks7/must-gather-dsd9f"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.442935 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mjks7/must-gather-dsd9f"
Sep 29 18:13:05 crc kubenswrapper[4592]: I0929 18:13:05.989736 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-mjks7/must-gather-dsd9f"]
Sep 29 18:13:06 crc kubenswrapper[4592]: I0929 18:13:06.511058 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/must-gather-dsd9f" event={"ID":"f8ef9825-c099-4d2c-9241-9eef9d20e90f","Type":"ContainerStarted","Data":"bd334d59de4bd75ba05c3b52a05c46b9c5956a30fe8770e4b5f2b3ed54e11054"}
Sep 29 18:13:06 crc kubenswrapper[4592]: I0929 18:13:06.511349 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/must-gather-dsd9f" event={"ID":"f8ef9825-c099-4d2c-9241-9eef9d20e90f","Type":"ContainerStarted","Data":"02473609f5da81608c7d9f5f93bc7117ea45c4ec670c1841d3dfb032117d1279"}
Sep 29 18:13:07 crc kubenswrapper[4592]: I0929 18:13:07.520821 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/must-gather-dsd9f" event={"ID":"f8ef9825-c099-4d2c-9241-9eef9d20e90f","Type":"ContainerStarted","Data":"4243085c3cd9b7ba49f303477262f4e6b9a79c4bca979fc4dc0fcf76cd0713a4"}
Sep 29 18:13:07 crc kubenswrapper[4592]: I0929 18:13:07.549968 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mjks7/must-gather-dsd9f" podStartSLOduration=2.549953248 podStartE2EDuration="2.549953248s" podCreationTimestamp="2025-09-29 18:13:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:13:07.547929332 +0000 UTC m=+4917.695707013" watchObservedRunningTime="2025-09-29 18:13:07.549953248 +0000 UTC m=+4917.697730929"
Sep 29 18:13:10 crc kubenswrapper[4592]: I0929 18:13:10.110893 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mjks7/crc-debug-9zpgp"]
Sep 29 18:13:10 crc kubenswrapper[4592]: I0929
18:13:10.112246 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mjks7/crc-debug-9zpgp"
Sep 29 18:13:10 crc kubenswrapper[4592]: I0929 18:13:10.195496 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b439ad19-7cf5-47ed-91a6-3a4fa517bc12-host\") pod \"crc-debug-9zpgp\" (UID: \"b439ad19-7cf5-47ed-91a6-3a4fa517bc12\") " pod="openshift-must-gather-mjks7/crc-debug-9zpgp"
Sep 29 18:13:10 crc kubenswrapper[4592]: I0929 18:13:10.195781 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw9nk\" (UniqueName: \"kubernetes.io/projected/b439ad19-7cf5-47ed-91a6-3a4fa517bc12-kube-api-access-kw9nk\") pod \"crc-debug-9zpgp\" (UID: \"b439ad19-7cf5-47ed-91a6-3a4fa517bc12\") " pod="openshift-must-gather-mjks7/crc-debug-9zpgp"
Sep 29 18:13:10 crc kubenswrapper[4592]: I0929 18:13:10.297522 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b439ad19-7cf5-47ed-91a6-3a4fa517bc12-host\") pod \"crc-debug-9zpgp\" (UID: \"b439ad19-7cf5-47ed-91a6-3a4fa517bc12\") " pod="openshift-must-gather-mjks7/crc-debug-9zpgp"
Sep 29 18:13:10 crc kubenswrapper[4592]: I0929 18:13:10.297587 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw9nk\" (UniqueName: \"kubernetes.io/projected/b439ad19-7cf5-47ed-91a6-3a4fa517bc12-kube-api-access-kw9nk\") pod \"crc-debug-9zpgp\" (UID: \"b439ad19-7cf5-47ed-91a6-3a4fa517bc12\") " pod="openshift-must-gather-mjks7/crc-debug-9zpgp"
Sep 29 18:13:10 crc kubenswrapper[4592]: I0929 18:13:10.297655 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b439ad19-7cf5-47ed-91a6-3a4fa517bc12-host\") pod \"crc-debug-9zpgp\" (UID: \"b439ad19-7cf5-47ed-91a6-3a4fa517bc12\") " pod="openshift-must-gather-mjks7/crc-debug-9zpgp"
Sep 29 18:13:10 crc kubenswrapper[4592]: I0929 18:13:10.324771 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw9nk\" (UniqueName: \"kubernetes.io/projected/b439ad19-7cf5-47ed-91a6-3a4fa517bc12-kube-api-access-kw9nk\") pod \"crc-debug-9zpgp\" (UID: \"b439ad19-7cf5-47ed-91a6-3a4fa517bc12\") " pod="openshift-must-gather-mjks7/crc-debug-9zpgp"
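
[Editor's note] The entries above show the kubelet volume manager's fixed setup order for crc-debug-9zpgp's two volumes: VerifyControllerAttachedVolume, then MountVolume started, then MountVolume.SetUp succeeded, once per volume; the teardown entries later in this log mirror the sequence in reverse (UnmountVolume started, TearDown succeeded, Volume detached). Below is a minimal Go sketch for pulling one pod's volume-lifecycle entries out of a log like this one. The pod UID and phase strings are copied from the entries above; the file name kubelet.log is an assumption for illustration, and this is not kubelet code.

```go
// volume_trace.go - a sketch: print the volume-lifecycle entries for one pod
// UID from a kubelet log. Phase strings are taken from the log text itself.
package main

import (
	"bufio"
	"fmt"
	"log"
	"os"
	"strings"
)

func main() {
	const podUID = "b439ad19-7cf5-47ed-91a6-3a4fa517bc12" // crc-debug-9zpgp
	phases := []string{
		"VerifyControllerAttachedVolume started", // volume confirmed attached
		"MountVolume started",                    // mount operation queued
		"MountVolume.SetUp succeeded",            // volume ready for the pod
		"UnmountVolume started",                  // teardown begins
		"UnmountVolume.TearDown succeeded",
		"Volume detached",
	}
	f, err := os.Open("kubelet.log") // assumed local copy of this log
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // entries can be very long
	for sc.Scan() {
		line := sc.Text()
		if !strings.Contains(line, podUID) {
			continue
		}
		for _, phase := range phases {
			if strings.Contains(line, phase) {
				fmt.Println(line)
				break
			}
		}
	}
	if err := sc.Err(); err != nil {
		log.Fatal(err)
	}
}
```
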
Sep 29 18:13:10 crc kubenswrapper[4592]: I0929 18:13:10.436803 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mjks7/crc-debug-9zpgp"
Sep 29 18:13:10 crc kubenswrapper[4592]: I0929 18:13:10.546792 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/crc-debug-9zpgp" event={"ID":"b439ad19-7cf5-47ed-91a6-3a4fa517bc12","Type":"ContainerStarted","Data":"019eb873c019f0a20cde1f1046972785325bb358277ff2951c453d9dd5354314"}
Sep 29 18:13:11 crc kubenswrapper[4592]: I0929 18:13:11.556579 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/crc-debug-9zpgp" event={"ID":"b439ad19-7cf5-47ed-91a6-3a4fa517bc12","Type":"ContainerStarted","Data":"c3de6b425a59d7ff3cfea5d526feddd9c2fc13f1f492251c1f2064e1568dd464"}
Sep 29 18:13:11 crc kubenswrapper[4592]: I0929 18:13:11.576243 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mjks7/crc-debug-9zpgp" podStartSLOduration=1.5762231629999999 podStartE2EDuration="1.576223163s" podCreationTimestamp="2025-09-29 18:13:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:13:11.568869351 +0000 UTC m=+4921.716647042" watchObservedRunningTime="2025-09-29 18:13:11.576223163 +0000 UTC m=+4921.724000864"
Sep 29 18:14:23 crc kubenswrapper[4592]: I0929 18:14:23.495933 4592 scope.go:117] "RemoveContainer" containerID="530abb4e9bf38923cb875562d9c129dde43bc8c7c151f02248fd3501c7cf60c6"
Sep 29 18:14:37 crc kubenswrapper[4592]: I0929 18:14:37.246595 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5b969b6866-rjf85_16bb91be-d91d-476e-a81d-44ef92c11718/barbican-api/0.log"
Sep 29 18:14:37 crc kubenswrapper[4592]: I0929 18:14:37.369060 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5b969b6866-rjf85_16bb91be-d91d-476e-a81d-44ef92c11718/barbican-api-log/0.log"
Sep 29 18:14:37 crc kubenswrapper[4592]: I0929 18:14:37.502470 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6fdf4f774d-zgzql_cd181b6d-4f45-415c-8038-4bf077b0a747/barbican-keystone-listener/0.log"
Sep 29 18:14:37 crc kubenswrapper[4592]: I0929 18:14:37.828196 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6fdf4f774d-zgzql_cd181b6d-4f45-415c-8038-4bf077b0a747/barbican-keystone-listener-log/0.log"
Sep 29 18:14:38 crc kubenswrapper[4592]: I0929 18:14:38.331867 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-78c8db585f-2qfs6_bda783a8-49d1-48be-9b21-695b1a673b1a/barbican-worker/0.log"
Sep 29 18:14:38 crc kubenswrapper[4592]: I0929 18:14:38.332349 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-78c8db585f-2qfs6_bda783a8-49d1-48be-9b21-695b1a673b1a/barbican-worker-log/0.log"
Sep 29 18:14:38 crc kubenswrapper[4592]: I0929 18:14:38.641596 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-fblgd_5319ffbb-ba68-4bdb-b15e-a4e5a4d25f48/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Sep 29 18:14:38 crc kubenswrapper[4592]: I0929 18:14:38.766639 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ff89d1c7-4a66-4e00-b04d-24e917c56e11/ceilometer-central-agent/0.log"
Sep 29 18:14:38 crc kubenswrapper[4592]: I0929 18:14:38.860285 4592 log.go:25] "Finished parsing log file"
path="/var/log/pods/openstack_ceilometer-0_ff89d1c7-4a66-4e00-b04d-24e917c56e11/proxy-httpd/0.log" Sep 29 18:14:38 crc kubenswrapper[4592]: I0929 18:14:38.973211 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ff89d1c7-4a66-4e00-b04d-24e917c56e11/ceilometer-notification-agent/0.log" Sep 29 18:14:39 crc kubenswrapper[4592]: I0929 18:14:39.008990 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ff89d1c7-4a66-4e00-b04d-24e917c56e11/sg-core/0.log" Sep 29 18:14:39 crc kubenswrapper[4592]: I0929 18:14:39.269698 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_aa1f23ba-8aae-4a33-8946-7cfcd7087e6e/cinder-api/0.log" Sep 29 18:14:39 crc kubenswrapper[4592]: I0929 18:14:39.281217 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_aa1f23ba-8aae-4a33-8946-7cfcd7087e6e/cinder-api-log/0.log" Sep 29 18:14:39 crc kubenswrapper[4592]: I0929 18:14:39.574426 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c/cinder-scheduler/0.log" Sep 29 18:14:39 crc kubenswrapper[4592]: I0929 18:14:39.600811 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_8fa5ed6b-e86f-4d9e-a5e3-c684728ba62c/probe/0.log" Sep 29 18:14:39 crc kubenswrapper[4592]: I0929 18:14:39.804810 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-2pb2t_5f2c16e8-c860-42a9-9888-63e22d9d57b2/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:39 crc kubenswrapper[4592]: I0929 18:14:39.933259 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-mtzhq_33421f74-e3cd-4318-b751-ed324d225253/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:40 crc kubenswrapper[4592]: I0929 18:14:40.063356 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-vzqb2_c242d2c5-5bda-4cd3-9324-7fd5d7403646/init/0.log" Sep 29 18:14:40 crc kubenswrapper[4592]: I0929 18:14:40.296165 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-vzqb2_c242d2c5-5bda-4cd3-9324-7fd5d7403646/init/0.log" Sep 29 18:14:40 crc kubenswrapper[4592]: I0929 18:14:40.401446 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-w2j4s_b0275d99-00b1-4174-ab01-598af7ed19b7/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:40 crc kubenswrapper[4592]: I0929 18:14:40.619351 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667c9c995c-vzqb2_c242d2c5-5bda-4cd3-9324-7fd5d7403646/dnsmasq-dns/0.log" Sep 29 18:14:40 crc kubenswrapper[4592]: I0929 18:14:40.628897 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_f396e95c-bf51-4e4d-9dc7-76188423316b/glance-httpd/0.log" Sep 29 18:14:40 crc kubenswrapper[4592]: I0929 18:14:40.684744 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_f396e95c-bf51-4e4d-9dc7-76188423316b/glance-log/0.log" Sep 29 18:14:40 crc kubenswrapper[4592]: I0929 18:14:40.785400 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_e42af79d-fc77-4451-8550-cbd866e1eabe/glance-log/0.log" Sep 
29 18:14:40 crc kubenswrapper[4592]: I0929 18:14:40.838304 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_e42af79d-fc77-4451-8550-cbd866e1eabe/glance-httpd/0.log" Sep 29 18:14:41 crc kubenswrapper[4592]: I0929 18:14:41.033882 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-749bb4c784-lnncs_2d536771-b1ae-4daf-a9f1-1a86e2af88e8/horizon/2.log" Sep 29 18:14:41 crc kubenswrapper[4592]: I0929 18:14:41.154740 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-749bb4c784-lnncs_2d536771-b1ae-4daf-a9f1-1a86e2af88e8/horizon/1.log" Sep 29 18:14:41 crc kubenswrapper[4592]: I0929 18:14:41.295010 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-w4m6p_3b5035f5-4d62-4661-8067-869b1e54997e/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:41 crc kubenswrapper[4592]: I0929 18:14:41.469444 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-79dcn_8896fbe8-6b4f-41d8-a85c-88ea182d4cf6/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:41 crc kubenswrapper[4592]: I0929 18:14:41.555298 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-749bb4c784-lnncs_2d536771-b1ae-4daf-a9f1-1a86e2af88e8/horizon-log/0.log" Sep 29 18:14:41 crc kubenswrapper[4592]: I0929 18:14:41.768736 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29319481-pq7gv_76943e31-a07e-46f1-865f-dcaa47257729/keystone-cron/0.log" Sep 29 18:14:42 crc kubenswrapper[4592]: I0929 18:14:42.028740 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-584d46f4c7-tdlrl_728e33a1-191b-4c9d-a2d2-e569433182ea/keystone-api/0.log" Sep 29 18:14:42 crc kubenswrapper[4592]: I0929 18:14:42.063793 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_1e655a7a-19bc-4d0d-ab87-2c906903d7c8/kube-state-metrics/0.log" Sep 29 18:14:42 crc kubenswrapper[4592]: I0929 18:14:42.209665 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-8x5b6_40c378c3-0f92-474d-aaed-f3cd105e4714/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:42 crc kubenswrapper[4592]: I0929 18:14:42.912230 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-mxbhh_29930a89-f89e-4db7-85e6-4f47c1033098/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:42 crc kubenswrapper[4592]: I0929 18:14:42.973035 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5588c77f49-qmt48_177b2eb7-9986-4985-bd07-1b5a5d86f678/neutron-httpd/0.log" Sep 29 18:14:43 crc kubenswrapper[4592]: I0929 18:14:43.081411 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5588c77f49-qmt48_177b2eb7-9986-4985-bd07-1b5a5d86f678/neutron-api/0.log" Sep 29 18:14:44 crc kubenswrapper[4592]: I0929 18:14:44.280252 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_ecda42b4-525c-464f-ab13-394434750d4a/nova-cell0-conductor-conductor/0.log" Sep 29 18:14:44 crc kubenswrapper[4592]: I0929 18:14:44.553265 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_96bba62b-2b30-4b47-af6b-5bf6e32275a1/nova-api-log/0.log" Sep 29 
18:14:44 crc kubenswrapper[4592]: I0929 18:14:44.923870 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_bb359aaf-6eae-40d2-a14e-3a7a47e3a286/nova-cell1-conductor-conductor/0.log" Sep 29 18:14:45 crc kubenswrapper[4592]: I0929 18:14:45.098217 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_96bba62b-2b30-4b47-af6b-5bf6e32275a1/nova-api-api/0.log" Sep 29 18:14:45 crc kubenswrapper[4592]: I0929 18:14:45.234201 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_a998c4c4-de7e-4c25-b2c3-87d54e3b9e56/nova-cell1-novncproxy-novncproxy/0.log" Sep 29 18:14:45 crc kubenswrapper[4592]: I0929 18:14:45.489291 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-rrsnr_e5d09077-a84b-4b69-974b-5286b27f244f/nova-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:45 crc kubenswrapper[4592]: I0929 18:14:45.575951 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_483fd1ac-005e-4d6f-8d1d-03a192a3b366/nova-metadata-log/0.log" Sep 29 18:14:46 crc kubenswrapper[4592]: I0929 18:14:46.301560 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_59ac4af6-5ade-49f1-8098-52e823dcf61f/mysql-bootstrap/0.log" Sep 29 18:14:46 crc kubenswrapper[4592]: I0929 18:14:46.318618 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_1ed52369-92ab-4da4-a517-1555c79b0a38/memcached/0.log" Sep 29 18:14:46 crc kubenswrapper[4592]: I0929 18:14:46.341106 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_d200c10f-b6bd-4908-b79e-7ab4ae10587d/nova-scheduler-scheduler/0.log" Sep 29 18:14:46 crc kubenswrapper[4592]: I0929 18:14:46.573976 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_59ac4af6-5ade-49f1-8098-52e823dcf61f/mysql-bootstrap/0.log" Sep 29 18:14:46 crc kubenswrapper[4592]: I0929 18:14:46.607700 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_59ac4af6-5ade-49f1-8098-52e823dcf61f/galera/0.log" Sep 29 18:14:46 crc kubenswrapper[4592]: I0929 18:14:46.934851 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4c85a81f-2e67-4a6f-928b-d4735005cd43/mysql-bootstrap/0.log" Sep 29 18:14:47 crc kubenswrapper[4592]: I0929 18:14:47.088079 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_483fd1ac-005e-4d6f-8d1d-03a192a3b366/nova-metadata-metadata/0.log" Sep 29 18:14:47 crc kubenswrapper[4592]: I0929 18:14:47.105490 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4c85a81f-2e67-4a6f-928b-d4735005cd43/mysql-bootstrap/0.log" Sep 29 18:14:47 crc kubenswrapper[4592]: I0929 18:14:47.143258 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_4c85a81f-2e67-4a6f-928b-d4735005cd43/galera/0.log" Sep 29 18:14:47 crc kubenswrapper[4592]: I0929 18:14:47.389797 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_d6e91b2c-f8ba-4654-8431-a50545a2c37b/openstackclient/0.log" Sep 29 18:14:47 crc kubenswrapper[4592]: I0929 18:14:47.446103 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jfzwf_d813cc31-c8ba-48c0-b523-3d2b3fbc3341/ovn-controller/0.log" Sep 29 18:14:47 crc 
kubenswrapper[4592]: I0929 18:14:47.578503 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-8xpsr_7132c9e8-ff15-414a-b384-4a266f3c84f8/openstack-network-exporter/0.log" Sep 29 18:14:47 crc kubenswrapper[4592]: I0929 18:14:47.760077 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7x4wp_fd373ead-845f-4c4d-b9d7-38f8424697d5/ovsdb-server-init/0.log" Sep 29 18:14:47 crc kubenswrapper[4592]: I0929 18:14:47.877606 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7x4wp_fd373ead-845f-4c4d-b9d7-38f8424697d5/ovsdb-server-init/0.log" Sep 29 18:14:47 crc kubenswrapper[4592]: I0929 18:14:47.946541 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7x4wp_fd373ead-845f-4c4d-b9d7-38f8424697d5/ovsdb-server/0.log" Sep 29 18:14:47 crc kubenswrapper[4592]: I0929 18:14:47.969447 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7x4wp_fd373ead-845f-4c4d-b9d7-38f8424697d5/ovs-vswitchd/0.log" Sep 29 18:14:48 crc kubenswrapper[4592]: I0929 18:14:48.121780 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-qdjjh_121a0489-01a2-492b-a564-2718b687e621/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:48 crc kubenswrapper[4592]: I0929 18:14:48.166737 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8bd365a0-dba3-4f81-a229-a344e01a6eca/openstack-network-exporter/0.log" Sep 29 18:14:48 crc kubenswrapper[4592]: I0929 18:14:48.302628 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_8bd365a0-dba3-4f81-a229-a344e01a6eca/ovn-northd/0.log" Sep 29 18:14:48 crc kubenswrapper[4592]: I0929 18:14:48.451935 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b9e2d243-07ca-4b99-a929-9ae3321c3274/openstack-network-exporter/0.log" Sep 29 18:14:48 crc kubenswrapper[4592]: I0929 18:14:48.518452 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_b9e2d243-07ca-4b99-a929-9ae3321c3274/ovsdbserver-nb/0.log" Sep 29 18:14:48 crc kubenswrapper[4592]: I0929 18:14:48.602451 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1549c716-ca22-42ff-9cea-e63e50856936/openstack-network-exporter/0.log" Sep 29 18:14:49 crc kubenswrapper[4592]: I0929 18:14:49.020364 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1549c716-ca22-42ff-9cea-e63e50856936/ovsdbserver-sb/0.log" Sep 29 18:14:49 crc kubenswrapper[4592]: I0929 18:14:49.230611 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6cbb8cd48-47ckj_f53c28fe-50d3-49b2-926e-fe4f166838ce/placement-log/0.log" Sep 29 18:14:49 crc kubenswrapper[4592]: I0929 18:14:49.273355 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6cbb8cd48-47ckj_f53c28fe-50d3-49b2-926e-fe4f166838ce/placement-api/0.log" Sep 29 18:14:49 crc kubenswrapper[4592]: I0929 18:14:49.363795 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9ae18931-f35a-4836-a054-06519e81aca0/setup-container/0.log" Sep 29 18:14:49 crc kubenswrapper[4592]: I0929 18:14:49.512258 4592 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9ae18931-f35a-4836-a054-06519e81aca0/setup-container/0.log" Sep 29 18:14:49 crc kubenswrapper[4592]: I0929 18:14:49.543468 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_9ae18931-f35a-4836-a054-06519e81aca0/rabbitmq/0.log" Sep 29 18:14:49 crc kubenswrapper[4592]: I0929 18:14:49.582470 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_b1c359e8-5df5-4ef2-97ed-a3753c1a681d/setup-container/0.log" Sep 29 18:14:49 crc kubenswrapper[4592]: I0929 18:14:49.778287 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_b1c359e8-5df5-4ef2-97ed-a3753c1a681d/setup-container/0.log" Sep 29 18:14:49 crc kubenswrapper[4592]: I0929 18:14:49.874857 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-qgwj9_fffd0339-970b-41b0-b868-de31bfdc29b0/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:49 crc kubenswrapper[4592]: I0929 18:14:49.999315 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_b1c359e8-5df5-4ef2-97ed-a3753c1a681d/rabbitmq/0.log" Sep 29 18:14:50 crc kubenswrapper[4592]: I0929 18:14:50.090290 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-gqflz_894df7ab-ced1-483a-98a5-2e7e496f1578/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:50 crc kubenswrapper[4592]: I0929 18:14:50.192635 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-sqlgm_abd885d0-dbac-4845-8a3e-2454abf4d652/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:50 crc kubenswrapper[4592]: I0929 18:14:50.776800 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-gvrb2_6a007423-0554-48b3-b38a-d23f2509aacd/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:50 crc kubenswrapper[4592]: I0929 18:14:50.837100 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-2nfwh_af7efc5b-eb67-4660-92ae-77d6efa85b0f/ssh-known-hosts-edpm-deployment/0.log" Sep 29 18:14:51 crc kubenswrapper[4592]: I0929 18:14:51.115574 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-74d4767f8f-mgqs7_f1171449-a884-43cb-b254-c2ee282ea3a0/proxy-httpd/0.log" Sep 29 18:14:51 crc kubenswrapper[4592]: I0929 18:14:51.143163 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-74d4767f8f-mgqs7_f1171449-a884-43cb-b254-c2ee282ea3a0/proxy-server/0.log" Sep 29 18:14:51 crc kubenswrapper[4592]: I0929 18:14:51.530102 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/account-auditor/0.log" Sep 29 18:14:51 crc kubenswrapper[4592]: I0929 18:14:51.546447 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-kk56p_b87c021d-8ea5-4e65-9a34-68e38d02b6c3/swift-ring-rebalance/0.log" Sep 29 18:14:51 crc kubenswrapper[4592]: I0929 18:14:51.582839 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/account-reaper/0.log" Sep 29 18:14:51 crc kubenswrapper[4592]: I0929 18:14:51.711014 4592 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/account-replicator/0.log" Sep 29 18:14:51 crc kubenswrapper[4592]: I0929 18:14:51.754403 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/account-server/0.log" Sep 29 18:14:51 crc kubenswrapper[4592]: I0929 18:14:51.788107 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/container-replicator/0.log" Sep 29 18:14:51 crc kubenswrapper[4592]: I0929 18:14:51.819394 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/container-auditor/0.log" Sep 29 18:14:51 crc kubenswrapper[4592]: I0929 18:14:51.930085 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/container-updater/0.log" Sep 29 18:14:51 crc kubenswrapper[4592]: I0929 18:14:51.944448 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/container-server/0.log" Sep 29 18:14:51 crc kubenswrapper[4592]: I0929 18:14:51.969170 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/object-auditor/0.log" Sep 29 18:14:52 crc kubenswrapper[4592]: I0929 18:14:52.012168 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/object-expirer/0.log" Sep 29 18:14:52 crc kubenswrapper[4592]: I0929 18:14:52.186871 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/object-replicator/0.log" Sep 29 18:14:52 crc kubenswrapper[4592]: I0929 18:14:52.262840 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/object-server/0.log" Sep 29 18:14:52 crc kubenswrapper[4592]: I0929 18:14:52.270939 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/object-updater/0.log" Sep 29 18:14:52 crc kubenswrapper[4592]: I0929 18:14:52.359279 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/rsync/0.log" Sep 29 18:14:52 crc kubenswrapper[4592]: I0929 18:14:52.570104 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-xjb5h_d76cbdef-0253-4fd5-abc2-bec6b0b6df81/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:14:52 crc kubenswrapper[4592]: I0929 18:14:52.610503 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f64e72d-c39e-45fa-b3df-ae8624976e86/swift-recon-cron/0.log" Sep 29 18:14:52 crc kubenswrapper[4592]: I0929 18:14:52.901542 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_1f57b8e4-0399-410d-a4ae-14451f3832f2/tempest-tests-tempest-tests-runner/0.log" Sep 29 18:14:52 crc kubenswrapper[4592]: I0929 18:14:52.941825 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_e85e8316-b254-45bb-b405-ac12c75f9433/test-operator-logs-container/0.log" Sep 29 18:14:53 crc kubenswrapper[4592]: I0929 18:14:53.089753 4592 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-dxvj9_0641d7e4-c868-48bd-948d-186401c6f3c7/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.147646 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv"] Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.149459 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv" Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.151250 4592 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.151636 4592 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.157796 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv"] Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.253768 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-secret-volume\") pod \"collect-profiles-29319495-fvgpv\" (UID: \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv" Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.253913 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-config-volume\") pod \"collect-profiles-29319495-fvgpv\" (UID: \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv" Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.253940 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fxpc\" (UniqueName: \"kubernetes.io/projected/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-kube-api-access-9fxpc\") pod \"collect-profiles-29319495-fvgpv\" (UID: \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv" Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.355937 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-secret-volume\") pod \"collect-profiles-29319495-fvgpv\" (UID: \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv" Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.356095 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-config-volume\") pod \"collect-profiles-29319495-fvgpv\" (UID: \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv" Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.356117 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fxpc\" (UniqueName: 
Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.356117 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fxpc\" (UniqueName: \"kubernetes.io/projected/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-kube-api-access-9fxpc\") pod \"collect-profiles-29319495-fvgpv\" (UID: \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv"
Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.363038 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-secret-volume\") pod \"collect-profiles-29319495-fvgpv\" (UID: \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv"
Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.389726 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fxpc\" (UniqueName: \"kubernetes.io/projected/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-kube-api-access-9fxpc\") pod \"collect-profiles-29319495-fvgpv\" (UID: \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv"
Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.395246 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-config-volume\") pod \"collect-profiles-29319495-fvgpv\" (UID: \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv"
Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.467538 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv"
Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.883753 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 18:15:00 crc kubenswrapper[4592]: I0929 18:15:00.884015 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 18:15:01 crc kubenswrapper[4592]: I0929 18:15:01.313903 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv"]
Sep 29 18:15:01 crc kubenswrapper[4592]: I0929 18:15:01.520719 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv" event={"ID":"93a5ae2e-21f9-45e9-a5b7-787d2626aca8","Type":"ContainerStarted","Data":"7e836a448f03fae01f00a4ce13ff333065217f73eb3e28c6be1e10f69712ee44"}
Sep 29 18:15:01 crc kubenswrapper[4592]: I0929 18:15:01.521050 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv" event={"ID":"93a5ae2e-21f9-45e9-a5b7-787d2626aca8","Type":"ContainerStarted","Data":"891e1edb71bea3914c17dad34276aff2d26d8b1d155424c2958f369e7179ebc7"}
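
[Editor's note] The two prober entries above record a failed liveness probe against machine-config-daemon: the kubelet's HTTP GET to 127.0.0.1:8798/health was refused outright, which means nothing was listening on the port rather than a slow or 5xx response. A minimal sketch of the equivalent check follows, assuming only that the probe is a plain HTTP GET as logged; the 2-second timeout is an illustrative assumption, not the pod's configured value.

```go
// probe_check.go - replay the liveness probe from the entries above: an HTTP
// GET against 127.0.0.1:8798/health with a short timeout (a sketch).
package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: 2 * time.Second}
	resp, err := client.Get("http://127.0.0.1:8798/health")
	if err != nil {
		// The failure mode in the log: "connect: connection refused",
		// i.e. no listener on the port at probe time.
		fmt.Println("probe failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("probe result:", resp.Status) // 2xx/3xx counts as success
}
```
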
Sep 29 18:15:01 crc kubenswrapper[4592]: I0929 18:15:01.535161 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv" podStartSLOduration=1.535125976 podStartE2EDuration="1.535125976s" podCreationTimestamp="2025-09-29 18:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:15:01.532918085 +0000 UTC m=+5031.680695776" watchObservedRunningTime="2025-09-29 18:15:01.535125976 +0000 UTC m=+5031.682903657"
Sep 29 18:15:02 crc kubenswrapper[4592]: I0929 18:15:02.528265 4592 generic.go:334] "Generic (PLEG): container finished" podID="93a5ae2e-21f9-45e9-a5b7-787d2626aca8" containerID="7e836a448f03fae01f00a4ce13ff333065217f73eb3e28c6be1e10f69712ee44" exitCode=0
Sep 29 18:15:02 crc kubenswrapper[4592]: I0929 18:15:02.528304 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv" event={"ID":"93a5ae2e-21f9-45e9-a5b7-787d2626aca8","Type":"ContainerDied","Data":"7e836a448f03fae01f00a4ce13ff333065217f73eb3e28c6be1e10f69712ee44"}
Sep 29 18:15:03 crc kubenswrapper[4592]: I0929 18:15:03.853190 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv"
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.022796 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fxpc\" (UniqueName: \"kubernetes.io/projected/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-kube-api-access-9fxpc\") pod \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\" (UID: \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\") "
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.022932 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-config-volume\") pod \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\" (UID: \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\") "
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.023090 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-secret-volume\") pod \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\" (UID: \"93a5ae2e-21f9-45e9-a5b7-787d2626aca8\") "
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.025972 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-config-volume" (OuterVolumeSpecName: "config-volume") pod "93a5ae2e-21f9-45e9-a5b7-787d2626aca8" (UID: "93a5ae2e-21f9-45e9-a5b7-787d2626aca8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
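
[Editor's note] In the startup-duration entry above, podStartSLOduration works out to exactly watchObservedRunningTime minus podCreationTimestamp (the firstStartedPulling/lastFinishedPulling fields are Go zero-value timestamps, apparently because no image pull was needed), and the m=+5031.68 suffix is Go's monotonic-clock reading, seconds since the kubelet process started. A sketch reproducing the arithmetic from the logged timestamps; the layout string is an assumption based on Go's default time.Time formatting, with the monotonic suffix stripped before parsing.

```go
// startup_slo.go - recompute the logged podStartSLOduration from the two
// timestamps in the entry above (a sketch, not kubelet code).
package main

import (
	"fmt"
	"time"
)

func main() {
	// Go's time.Time prints as "2006-01-02 15:04:05.999999999 -0700 MST";
	// the trailing "m=+..." monotonic reading is dropped before parsing.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2025-09-29 18:15:00 +0000 UTC")
	if err != nil {
		panic(err)
	}
	watched, err := time.Parse(layout, "2025-09-29 18:15:01.535125976 +0000 UTC")
	if err != nil {
		panic(err)
	}
	fmt.Println(watched.Sub(created)) // 1.535125976s == podStartSLOduration
}
```
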
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.029471 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-kube-api-access-9fxpc" (OuterVolumeSpecName: "kube-api-access-9fxpc") pod "93a5ae2e-21f9-45e9-a5b7-787d2626aca8" (UID: "93a5ae2e-21f9-45e9-a5b7-787d2626aca8"). InnerVolumeSpecName "kube-api-access-9fxpc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.030615 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "93a5ae2e-21f9-45e9-a5b7-787d2626aca8" (UID: "93a5ae2e-21f9-45e9-a5b7-787d2626aca8"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.128267 4592 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-secret-volume\") on node \"crc\" DevicePath \"\""
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.128299 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fxpc\" (UniqueName: \"kubernetes.io/projected/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-kube-api-access-9fxpc\") on node \"crc\" DevicePath \"\""
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.128309 4592 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/93a5ae2e-21f9-45e9-a5b7-787d2626aca8-config-volume\") on node \"crc\" DevicePath \"\""
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.368488 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg"]
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.376366 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29319450-vmkkg"]
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.541788 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv" event={"ID":"93a5ae2e-21f9-45e9-a5b7-787d2626aca8","Type":"ContainerDied","Data":"891e1edb71bea3914c17dad34276aff2d26d8b1d155424c2958f369e7179ebc7"}
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.541837 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="891e1edb71bea3914c17dad34276aff2d26d8b1d155424c2958f369e7179ebc7"
Sep 29 18:15:04 crc kubenswrapper[4592]: I0929 18:15:04.541911 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29319495-fvgpv"
Sep 29 18:15:05 crc kubenswrapper[4592]: I0929 18:15:05.193573 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57cd85f9-6e86-40ae-9a20-7421a2399c6c" path="/var/lib/kubelet/pods/57cd85f9-6e86-40ae-9a20-7421a2399c6c/volumes"
Sep 29 18:15:12 crc kubenswrapper[4592]: I0929 18:15:12.612013 4592 generic.go:334] "Generic (PLEG): container finished" podID="b439ad19-7cf5-47ed-91a6-3a4fa517bc12" containerID="c3de6b425a59d7ff3cfea5d526feddd9c2fc13f1f492251c1f2064e1568dd464" exitCode=0
Sep 29 18:15:12 crc kubenswrapper[4592]: I0929 18:15:12.612141 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/crc-debug-9zpgp" event={"ID":"b439ad19-7cf5-47ed-91a6-3a4fa517bc12","Type":"ContainerDied","Data":"c3de6b425a59d7ff3cfea5d526feddd9c2fc13f1f492251c1f2064e1568dd464"}
Sep 29 18:15:13 crc kubenswrapper[4592]: I0929 18:15:13.721994 4592 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-must-gather-mjks7/crc-debug-9zpgp" Sep 29 18:15:13 crc kubenswrapper[4592]: I0929 18:15:13.768532 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mjks7/crc-debug-9zpgp"] Sep 29 18:15:13 crc kubenswrapper[4592]: I0929 18:15:13.777759 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mjks7/crc-debug-9zpgp"] Sep 29 18:15:13 crc kubenswrapper[4592]: I0929 18:15:13.877925 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kw9nk\" (UniqueName: \"kubernetes.io/projected/b439ad19-7cf5-47ed-91a6-3a4fa517bc12-kube-api-access-kw9nk\") pod \"b439ad19-7cf5-47ed-91a6-3a4fa517bc12\" (UID: \"b439ad19-7cf5-47ed-91a6-3a4fa517bc12\") " Sep 29 18:15:13 crc kubenswrapper[4592]: I0929 18:15:13.878237 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b439ad19-7cf5-47ed-91a6-3a4fa517bc12-host\") pod \"b439ad19-7cf5-47ed-91a6-3a4fa517bc12\" (UID: \"b439ad19-7cf5-47ed-91a6-3a4fa517bc12\") " Sep 29 18:15:13 crc kubenswrapper[4592]: I0929 18:15:13.878286 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b439ad19-7cf5-47ed-91a6-3a4fa517bc12-host" (OuterVolumeSpecName: "host") pod "b439ad19-7cf5-47ed-91a6-3a4fa517bc12" (UID: "b439ad19-7cf5-47ed-91a6-3a4fa517bc12"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Sep 29 18:15:13 crc kubenswrapper[4592]: I0929 18:15:13.878648 4592 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b439ad19-7cf5-47ed-91a6-3a4fa517bc12-host\") on node \"crc\" DevicePath \"\"" Sep 29 18:15:13 crc kubenswrapper[4592]: I0929 18:15:13.885681 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b439ad19-7cf5-47ed-91a6-3a4fa517bc12-kube-api-access-kw9nk" (OuterVolumeSpecName: "kube-api-access-kw9nk") pod "b439ad19-7cf5-47ed-91a6-3a4fa517bc12" (UID: "b439ad19-7cf5-47ed-91a6-3a4fa517bc12"). InnerVolumeSpecName "kube-api-access-kw9nk". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:15:13 crc kubenswrapper[4592]: I0929 18:15:13.979984 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kw9nk\" (UniqueName: \"kubernetes.io/projected/b439ad19-7cf5-47ed-91a6-3a4fa517bc12-kube-api-access-kw9nk\") on node \"crc\" DevicePath \"\"" Sep 29 18:15:14 crc kubenswrapper[4592]: I0929 18:15:14.632820 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="019eb873c019f0a20cde1f1046972785325bb358277ff2951c453d9dd5354314" Sep 29 18:15:14 crc kubenswrapper[4592]: I0929 18:15:14.633202 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mjks7/crc-debug-9zpgp"
Sep 29 18:15:14 crc kubenswrapper[4592]: I0929 18:15:14.915839 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mjks7/crc-debug-vrk5n"]
Sep 29 18:15:14 crc kubenswrapper[4592]: E0929 18:15:14.916209 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b439ad19-7cf5-47ed-91a6-3a4fa517bc12" containerName="container-00"
Sep 29 18:15:14 crc kubenswrapper[4592]: I0929 18:15:14.916221 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="b439ad19-7cf5-47ed-91a6-3a4fa517bc12" containerName="container-00"
Sep 29 18:15:14 crc kubenswrapper[4592]: E0929 18:15:14.916244 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93a5ae2e-21f9-45e9-a5b7-787d2626aca8" containerName="collect-profiles"
Sep 29 18:15:14 crc kubenswrapper[4592]: I0929 18:15:14.916250 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="93a5ae2e-21f9-45e9-a5b7-787d2626aca8" containerName="collect-profiles"
Sep 29 18:15:14 crc kubenswrapper[4592]: I0929 18:15:14.916424 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="93a5ae2e-21f9-45e9-a5b7-787d2626aca8" containerName="collect-profiles"
Sep 29 18:15:14 crc kubenswrapper[4592]: I0929 18:15:14.916438 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="b439ad19-7cf5-47ed-91a6-3a4fa517bc12" containerName="container-00"
Sep 29 18:15:14 crc kubenswrapper[4592]: I0929 18:15:14.917011 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mjks7/crc-debug-vrk5n"
Sep 29 18:15:15 crc kubenswrapper[4592]: I0929 18:15:15.000342 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bae41614-5778-49e9-8b12-58922727f63f-host\") pod \"crc-debug-vrk5n\" (UID: \"bae41614-5778-49e9-8b12-58922727f63f\") " pod="openshift-must-gather-mjks7/crc-debug-vrk5n"
Sep 29 18:15:15 crc kubenswrapper[4592]: I0929 18:15:15.000512 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm47c\" (UniqueName: \"kubernetes.io/projected/bae41614-5778-49e9-8b12-58922727f63f-kube-api-access-wm47c\") pod \"crc-debug-vrk5n\" (UID: \"bae41614-5778-49e9-8b12-58922727f63f\") " pod="openshift-must-gather-mjks7/crc-debug-vrk5n"
Sep 29 18:15:15 crc kubenswrapper[4592]: I0929 18:15:15.102505 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm47c\" (UniqueName: \"kubernetes.io/projected/bae41614-5778-49e9-8b12-58922727f63f-kube-api-access-wm47c\") pod \"crc-debug-vrk5n\" (UID: \"bae41614-5778-49e9-8b12-58922727f63f\") " pod="openshift-must-gather-mjks7/crc-debug-vrk5n"
Sep 29 18:15:15 crc kubenswrapper[4592]: I0929 18:15:15.102703 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bae41614-5778-49e9-8b12-58922727f63f-host\") pod \"crc-debug-vrk5n\" (UID: \"bae41614-5778-49e9-8b12-58922727f63f\") " pod="openshift-must-gather-mjks7/crc-debug-vrk5n"
Sep 29 18:15:15 crc kubenswrapper[4592]: I0929 18:15:15.102948 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bae41614-5778-49e9-8b12-58922727f63f-host\") pod \"crc-debug-vrk5n\" (UID: \"bae41614-5778-49e9-8b12-58922727f63f\") " pod="openshift-must-gather-mjks7/crc-debug-vrk5n"
Sep 29 18:15:15 crc kubenswrapper[4592]: I0929 18:15:15.128193 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm47c\" (UniqueName: \"kubernetes.io/projected/bae41614-5778-49e9-8b12-58922727f63f-kube-api-access-wm47c\") pod \"crc-debug-vrk5n\" (UID: \"bae41614-5778-49e9-8b12-58922727f63f\") " pod="openshift-must-gather-mjks7/crc-debug-vrk5n"
Sep 29 18:15:15 crc kubenswrapper[4592]: I0929 18:15:15.197947 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b439ad19-7cf5-47ed-91a6-3a4fa517bc12" path="/var/lib/kubelet/pods/b439ad19-7cf5-47ed-91a6-3a4fa517bc12/volumes"
Sep 29 18:15:15 crc kubenswrapper[4592]: I0929 18:15:15.238779 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mjks7/crc-debug-vrk5n"
Sep 29 18:15:15 crc kubenswrapper[4592]: I0929 18:15:15.643508 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/crc-debug-vrk5n" event={"ID":"bae41614-5778-49e9-8b12-58922727f63f","Type":"ContainerStarted","Data":"71e340a9daf6da492e5dd7f54350ca9690747ca9023211a1e29a7eba99f63d98"}
Sep 29 18:15:15 crc kubenswrapper[4592]: I0929 18:15:15.643836 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/crc-debug-vrk5n" event={"ID":"bae41614-5778-49e9-8b12-58922727f63f","Type":"ContainerStarted","Data":"e31c3d225d63017d2a36bc43bf6154242b85bfaffb49631cd0d7ebcf0aec1b8e"}
Sep 29 18:15:15 crc kubenswrapper[4592]: I0929 18:15:15.658124 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-mjks7/crc-debug-vrk5n" podStartSLOduration=1.6581018159999998 podStartE2EDuration="1.658101816s" podCreationTimestamp="2025-09-29 18:15:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-09-29 18:15:15.655895615 +0000 UTC m=+5045.803673306" watchObservedRunningTime="2025-09-29 18:15:15.658101816 +0000 UTC m=+5045.805879507"
Sep 29 18:15:16 crc kubenswrapper[4592]: I0929 18:15:16.651385 4592 generic.go:334] "Generic (PLEG): container finished" podID="bae41614-5778-49e9-8b12-58922727f63f" containerID="71e340a9daf6da492e5dd7f54350ca9690747ca9023211a1e29a7eba99f63d98" exitCode=0
Sep 29 18:15:16 crc kubenswrapper[4592]: I0929 18:15:16.651479 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/crc-debug-vrk5n" event={"ID":"bae41614-5778-49e9-8b12-58922727f63f","Type":"ContainerDied","Data":"71e340a9daf6da492e5dd7f54350ca9690747ca9023211a1e29a7eba99f63d98"}
Sep 29 18:15:17 crc kubenswrapper[4592]: I0929 18:15:17.759944 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mjks7/crc-debug-vrk5n"
Sep 29 18:15:17 crc kubenswrapper[4592]: I0929 18:15:17.877725 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wm47c\" (UniqueName: \"kubernetes.io/projected/bae41614-5778-49e9-8b12-58922727f63f-kube-api-access-wm47c\") pod \"bae41614-5778-49e9-8b12-58922727f63f\" (UID: \"bae41614-5778-49e9-8b12-58922727f63f\") "
Sep 29 18:15:17 crc kubenswrapper[4592]: I0929 18:15:17.877893 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bae41614-5778-49e9-8b12-58922727f63f-host\") pod \"bae41614-5778-49e9-8b12-58922727f63f\" (UID: \"bae41614-5778-49e9-8b12-58922727f63f\") "
Sep 29 18:15:17 crc kubenswrapper[4592]: I0929 18:15:17.878465 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bae41614-5778-49e9-8b12-58922727f63f-host" (OuterVolumeSpecName: "host") pod "bae41614-5778-49e9-8b12-58922727f63f" (UID: "bae41614-5778-49e9-8b12-58922727f63f"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 18:15:17 crc kubenswrapper[4592]: I0929 18:15:17.887668 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bae41614-5778-49e9-8b12-58922727f63f-kube-api-access-wm47c" (OuterVolumeSpecName: "kube-api-access-wm47c") pod "bae41614-5778-49e9-8b12-58922727f63f" (UID: "bae41614-5778-49e9-8b12-58922727f63f"). InnerVolumeSpecName "kube-api-access-wm47c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 18:15:17 crc kubenswrapper[4592]: I0929 18:15:17.979476 4592 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bae41614-5778-49e9-8b12-58922727f63f-host\") on node \"crc\" DevicePath \"\""
Sep 29 18:15:17 crc kubenswrapper[4592]: I0929 18:15:17.979733 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wm47c\" (UniqueName: \"kubernetes.io/projected/bae41614-5778-49e9-8b12-58922727f63f-kube-api-access-wm47c\") on node \"crc\" DevicePath \"\""
Sep 29 18:15:18 crc kubenswrapper[4592]: I0929 18:15:18.686125 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/crc-debug-vrk5n" event={"ID":"bae41614-5778-49e9-8b12-58922727f63f","Type":"ContainerDied","Data":"e31c3d225d63017d2a36bc43bf6154242b85bfaffb49631cd0d7ebcf0aec1b8e"}
Sep 29 18:15:18 crc kubenswrapper[4592]: I0929 18:15:18.686176 4592 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e31c3d225d63017d2a36bc43bf6154242b85bfaffb49631cd0d7ebcf0aec1b8e"
Sep 29 18:15:18 crc kubenswrapper[4592]: I0929 18:15:18.686211 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mjks7/crc-debug-vrk5n"
Sep 29 18:15:23 crc kubenswrapper[4592]: I0929 18:15:23.460084 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mjks7/crc-debug-vrk5n"]
Sep 29 18:15:23 crc kubenswrapper[4592]: I0929 18:15:23.471370 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mjks7/crc-debug-vrk5n"]
Sep 29 18:15:23 crc kubenswrapper[4592]: I0929 18:15:23.567781 4592 scope.go:117] "RemoveContainer" containerID="491db416994d0f2750ebdfe3ad1c0907ae0369982a523c7781c3c05da298477b"
Sep 29 18:15:24 crc kubenswrapper[4592]: I0929 18:15:24.699214 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-mjks7/crc-debug-jxrd8"]
Sep 29 18:15:24 crc kubenswrapper[4592]: E0929 18:15:24.700084 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bae41614-5778-49e9-8b12-58922727f63f" containerName="container-00"
Sep 29 18:15:24 crc kubenswrapper[4592]: I0929 18:15:24.700110 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="bae41614-5778-49e9-8b12-58922727f63f" containerName="container-00"
Sep 29 18:15:24 crc kubenswrapper[4592]: I0929 18:15:24.700479 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="bae41614-5778-49e9-8b12-58922727f63f" containerName="container-00"
Sep 29 18:15:24 crc kubenswrapper[4592]: I0929 18:15:24.701279 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mjks7/crc-debug-jxrd8"
Sep 29 18:15:24 crc kubenswrapper[4592]: I0929 18:15:24.793255 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfe8a593-0a55-47b6-a4ec-06817e1babcc-host\") pod \"crc-debug-jxrd8\" (UID: \"cfe8a593-0a55-47b6-a4ec-06817e1babcc\") " pod="openshift-must-gather-mjks7/crc-debug-jxrd8"
Sep 29 18:15:24 crc kubenswrapper[4592]: I0929 18:15:24.793528 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbvf8\" (UniqueName: \"kubernetes.io/projected/cfe8a593-0a55-47b6-a4ec-06817e1babcc-kube-api-access-sbvf8\") pod \"crc-debug-jxrd8\" (UID: \"cfe8a593-0a55-47b6-a4ec-06817e1babcc\") " pod="openshift-must-gather-mjks7/crc-debug-jxrd8"
Sep 29 18:15:24 crc kubenswrapper[4592]: I0929 18:15:24.895813 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbvf8\" (UniqueName: \"kubernetes.io/projected/cfe8a593-0a55-47b6-a4ec-06817e1babcc-kube-api-access-sbvf8\") pod \"crc-debug-jxrd8\" (UID: \"cfe8a593-0a55-47b6-a4ec-06817e1babcc\") " pod="openshift-must-gather-mjks7/crc-debug-jxrd8"
Sep 29 18:15:24 crc kubenswrapper[4592]: I0929 18:15:24.895976 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfe8a593-0a55-47b6-a4ec-06817e1babcc-host\") pod \"crc-debug-jxrd8\" (UID: \"cfe8a593-0a55-47b6-a4ec-06817e1babcc\") " pod="openshift-must-gather-mjks7/crc-debug-jxrd8"
Sep 29 18:15:24 crc kubenswrapper[4592]: I0929 18:15:24.896107 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfe8a593-0a55-47b6-a4ec-06817e1babcc-host\") pod \"crc-debug-jxrd8\" (UID: \"cfe8a593-0a55-47b6-a4ec-06817e1babcc\") " pod="openshift-must-gather-mjks7/crc-debug-jxrd8"
Sep 29 18:15:24 crc kubenswrapper[4592]: I0929 18:15:24.930009 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbvf8\" (UniqueName: \"kubernetes.io/projected/cfe8a593-0a55-47b6-a4ec-06817e1babcc-kube-api-access-sbvf8\") pod \"crc-debug-jxrd8\" (UID: \"cfe8a593-0a55-47b6-a4ec-06817e1babcc\") " pod="openshift-must-gather-mjks7/crc-debug-jxrd8"
Sep 29 18:15:25 crc kubenswrapper[4592]: I0929 18:15:25.020010 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mjks7/crc-debug-jxrd8"
Sep 29 18:15:25 crc kubenswrapper[4592]: I0929 18:15:25.195570 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bae41614-5778-49e9-8b12-58922727f63f" path="/var/lib/kubelet/pods/bae41614-5778-49e9-8b12-58922727f63f/volumes"
Sep 29 18:15:25 crc kubenswrapper[4592]: I0929 18:15:25.758323 4592 generic.go:334] "Generic (PLEG): container finished" podID="cfe8a593-0a55-47b6-a4ec-06817e1babcc" containerID="83d2bfad7e163790a26c9b3280653ab5f4c49882ff318ae7e77a53577d58debc" exitCode=0
Sep 29 18:15:25 crc kubenswrapper[4592]: I0929 18:15:25.758575 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/crc-debug-jxrd8" event={"ID":"cfe8a593-0a55-47b6-a4ec-06817e1babcc","Type":"ContainerDied","Data":"83d2bfad7e163790a26c9b3280653ab5f4c49882ff318ae7e77a53577d58debc"}
Sep 29 18:15:25 crc kubenswrapper[4592]: I0929 18:15:25.758599 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/crc-debug-jxrd8" event={"ID":"cfe8a593-0a55-47b6-a4ec-06817e1babcc","Type":"ContainerStarted","Data":"3f8969d8d41ee8e85ec43e7bfe4cd9fa793a03056edc00b40e07a70a157cd5e1"}
Sep 29 18:15:25 crc kubenswrapper[4592]: I0929 18:15:25.836871 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mjks7/crc-debug-jxrd8"]
Sep 29 18:15:25 crc kubenswrapper[4592]: I0929 18:15:25.846526 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mjks7/crc-debug-jxrd8"]
Sep 29 18:15:26 crc kubenswrapper[4592]: I0929 18:15:26.845493 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mjks7/crc-debug-jxrd8"
Sep 29 18:15:26 crc kubenswrapper[4592]: I0929 18:15:26.949686 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfe8a593-0a55-47b6-a4ec-06817e1babcc-host\") pod \"cfe8a593-0a55-47b6-a4ec-06817e1babcc\" (UID: \"cfe8a593-0a55-47b6-a4ec-06817e1babcc\") "
Sep 29 18:15:26 crc kubenswrapper[4592]: I0929 18:15:26.949739 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbvf8\" (UniqueName: \"kubernetes.io/projected/cfe8a593-0a55-47b6-a4ec-06817e1babcc-kube-api-access-sbvf8\") pod \"cfe8a593-0a55-47b6-a4ec-06817e1babcc\" (UID: \"cfe8a593-0a55-47b6-a4ec-06817e1babcc\") "
Sep 29 18:15:26 crc kubenswrapper[4592]: I0929 18:15:26.950783 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cfe8a593-0a55-47b6-a4ec-06817e1babcc-host" (OuterVolumeSpecName: "host") pod "cfe8a593-0a55-47b6-a4ec-06817e1babcc" (UID: "cfe8a593-0a55-47b6-a4ec-06817e1babcc"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Sep 29 18:15:26 crc kubenswrapper[4592]: I0929 18:15:26.961439 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfe8a593-0a55-47b6-a4ec-06817e1babcc-kube-api-access-sbvf8" (OuterVolumeSpecName: "kube-api-access-sbvf8") pod "cfe8a593-0a55-47b6-a4ec-06817e1babcc" (UID: "cfe8a593-0a55-47b6-a4ec-06817e1babcc"). InnerVolumeSpecName "kube-api-access-sbvf8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 18:15:27 crc kubenswrapper[4592]: I0929 18:15:27.051652 4592 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/cfe8a593-0a55-47b6-a4ec-06817e1babcc-host\") on node \"crc\" DevicePath \"\""
Sep 29 18:15:27 crc kubenswrapper[4592]: I0929 18:15:27.051680 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbvf8\" (UniqueName: \"kubernetes.io/projected/cfe8a593-0a55-47b6-a4ec-06817e1babcc-kube-api-access-sbvf8\") on node \"crc\" DevicePath \"\""
Sep 29 18:15:27 crc kubenswrapper[4592]: I0929 18:15:27.192010 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfe8a593-0a55-47b6-a4ec-06817e1babcc" path="/var/lib/kubelet/pods/cfe8a593-0a55-47b6-a4ec-06817e1babcc/volumes"
Sep 29 18:15:27 crc kubenswrapper[4592]: I0929 18:15:27.532239 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/util/0.log"
Sep 29 18:15:27 crc kubenswrapper[4592]: I0929 18:15:27.767712 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/pull/0.log"
Sep 29 18:15:27 crc kubenswrapper[4592]: I0929 18:15:27.775519 4592 scope.go:117] "RemoveContainer" containerID="83d2bfad7e163790a26c9b3280653ab5f4c49882ff318ae7e77a53577d58debc"
Sep 29 18:15:27 crc kubenswrapper[4592]: I0929 18:15:27.775539 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-mjks7/crc-debug-jxrd8"
Sep 29 18:15:27 crc kubenswrapper[4592]: I0929 18:15:27.797677 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/util/0.log"
Sep 29 18:15:27 crc kubenswrapper[4592]: I0929 18:15:27.832948 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/pull/0.log"
Sep 29 18:15:27 crc kubenswrapper[4592]: I0929 18:15:27.992152 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/util/0.log"
Sep 29 18:15:28 crc kubenswrapper[4592]: I0929 18:15:28.052819 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/pull/0.log"
Sep 29 18:15:28 crc kubenswrapper[4592]: I0929 18:15:28.075247 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9b74bb8058ab023cabc16743198c570c400e09be4176ca71d5a137ed1c9d8lx_8bf4afc4-5581-49d5-853c-8309dd3ea1bd/extract/0.log"
Sep 29 18:15:28 crc kubenswrapper[4592]: I0929 18:15:28.202989 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-55xvk_7ef58432-073e-43a5-bc36-38cb3611b118/kube-rbac-proxy/0.log"
Sep 29 18:15:28 crc kubenswrapper[4592]: I0929 18:15:28.338447 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-5p559_de451eb0-13ae-4fab-a6f3-3cc8fb77566f/kube-rbac-proxy/0.log"
Sep 29 18:15:28 crc kubenswrapper[4592]: I0929 18:15:28.373824 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6ff8b75857-55xvk_7ef58432-073e-43a5-bc36-38cb3611b118/manager/0.log"
Sep 29 18:15:28 crc kubenswrapper[4592]: I0929 18:15:28.424120 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-644bddb6d8-5p559_de451eb0-13ae-4fab-a6f3-3cc8fb77566f/manager/0.log"
Sep 29 18:15:28 crc kubenswrapper[4592]: I0929 18:15:28.801790 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-bj7fn_ba256bd8-c14c-458e-b919-2feedb3a0c46/kube-rbac-proxy/0.log"
Sep 29 18:15:28 crc kubenswrapper[4592]: I0929 18:15:28.826857 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-84f4f7b77b-bj7fn_ba256bd8-c14c-458e-b919-2feedb3a0c46/manager/0.log"
Sep 29 18:15:28 crc kubenswrapper[4592]: I0929 18:15:28.996374 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-k969c_f8504fb5-9c3b-4b51-bf22-31c6bcdacad4/kube-rbac-proxy/0.log"
Sep 29 18:15:29 crc kubenswrapper[4592]: I0929 18:15:29.075634 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-84958c4d49-k969c_f8504fb5-9c3b-4b51-bf22-31c6bcdacad4/manager/0.log"
Sep 29 18:15:29 crc kubenswrapper[4592]: I0929 18:15:29.183782 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-f8v56_9c565c72-206a-42a7-943d-c55fd9065e5f/kube-rbac-proxy/0.log"
Sep 29 18:15:29 crc kubenswrapper[4592]: I0929 18:15:29.184673 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5d889d78cf-f8v56_9c565c72-206a-42a7-943d-c55fd9065e5f/manager/0.log"
Sep 29 18:15:29 crc kubenswrapper[4592]: I0929 18:15:29.337466 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-p4r66_1d74dab2-fe04-4218-8b91-4b958b0ad39d/kube-rbac-proxy/0.log"
Sep 29 18:15:29 crc kubenswrapper[4592]: I0929 18:15:29.360516 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-9f4696d94-p4r66_1d74dab2-fe04-4218-8b91-4b958b0ad39d/manager/0.log"
Sep 29 18:15:29 crc kubenswrapper[4592]: I0929 18:15:29.530519 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-b8fbf_a4e61264-26ad-4012-be6c-4d6596b4ab27/kube-rbac-proxy/0.log"
Sep 29 18:15:29 crc kubenswrapper[4592]: I0929 18:15:29.667131 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-kjsck_a4b81165-b69a-40fa-b875-6d138351d6e6/kube-rbac-proxy/0.log"
Sep 29 18:15:29 crc kubenswrapper[4592]: I0929 18:15:29.669450 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-7d857cc749-b8fbf_a4e61264-26ad-4012-be6c-4d6596b4ab27/manager/0.log"
Sep 29 18:15:29 crc kubenswrapper[4592]: I0929 18:15:29.753862 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-7975b88857-kjsck_a4b81165-b69a-40fa-b875-6d138351d6e6/manager/0.log"
Sep 29 18:15:29 crc kubenswrapper[4592]: I0929 18:15:29.865198 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-9n2d2_dff5de8b-2910-4e5a-a80a-089c649039cd/kube-rbac-proxy/0.log"
Sep 29 18:15:29 crc kubenswrapper[4592]: I0929 18:15:29.979666 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-5bd55b4bff-9n2d2_dff5de8b-2910-4e5a-a80a-089c649039cd/manager/0.log"
Sep 29 18:15:30 crc kubenswrapper[4592]: I0929 18:15:30.591432 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-cffzv_bfa2f914-2596-49e6-bb75-760663a69813/kube-rbac-proxy/0.log"
Sep 29 18:15:30 crc kubenswrapper[4592]: I0929 18:15:30.592956 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6d68dbc695-cffzv_bfa2f914-2596-49e6-bb75-760663a69813/manager/0.log"
Sep 29 18:15:30 crc kubenswrapper[4592]: I0929 18:15:30.785300 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-ssmqx_6bf183ea-90d6-4aff-9e61-d4cc3692fe08/kube-rbac-proxy/0.log"
Sep 29 18:15:30 crc kubenswrapper[4592]: I0929 18:15:30.882816 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 18:15:30 crc kubenswrapper[4592]: I0929 18:15:30.882869 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 18:15:30 crc kubenswrapper[4592]: I0929 18:15:30.914683 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-88c7-ssmqx_6bf183ea-90d6-4aff-9e61-d4cc3692fe08/manager/0.log"
Sep 29 18:15:31 crc kubenswrapper[4592]: I0929 18:15:31.069017 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-7sgxz_fd9f041b-9fd6-4d50-bc82-35fd86eea539/kube-rbac-proxy/0.log"
Sep 29 18:15:31 crc kubenswrapper[4592]: I0929 18:15:31.163548 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-64d7b59854-7sgxz_fd9f041b-9fd6-4d50-bc82-35fd86eea539/manager/0.log"
Sep 29 18:15:31 crc kubenswrapper[4592]: I0929 18:15:31.210452 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-hg6b2_5746404b-3a0f-4851-9de9-28e4e7ef8f1f/kube-rbac-proxy/0.log"
Sep 29 18:15:31 crc kubenswrapper[4592]: I0929 18:15:31.406303 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-fc5kq_87bb1f2b-bc93-4b10-aa27-b8efd9ba669a/kube-rbac-proxy/0.log"
Sep 29 18:15:31 crc kubenswrapper[4592]: I0929 18:15:31.410815 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-76fcc6dc7c-fc5kq_87bb1f2b-bc93-4b10-aa27-b8efd9ba669a/manager/0.log"
Sep 29 18:15:31 crc kubenswrapper[4592]: I0929 18:15:31.512164 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-c7c776c96-hg6b2_5746404b-3a0f-4851-9de9-28e4e7ef8f1f/manager/0.log"
Sep 29 18:15:31 crc kubenswrapper[4592]: I0929 18:15:31.612499 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-fqfbf_965c0641-f6e8-44e3-a8a1-32028665b9e2/manager/0.log"
Sep 29 18:15:31 crc kubenswrapper[4592]: I0929 18:15:31.712593 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6d776955-fqfbf_965c0641-f6e8-44e3-a8a1-32028665b9e2/kube-rbac-proxy/0.log"
Sep 29 18:15:31 crc kubenswrapper[4592]: I0929 18:15:31.758015 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7b9c4c58f5-fdd5n_8da8ce1f-60e7-4381-975e-daf9c5225b10/kube-rbac-proxy/0.log"
Sep 29 18:15:31 crc kubenswrapper[4592]: I0929 18:15:31.946225 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7bf7677558-jj6jr_83cbe230-dcbc-4c90-befd-35f5082eaba6/kube-rbac-proxy/0.log"
Sep 29 18:15:32 crc kubenswrapper[4592]: I0929 18:15:32.136072 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-7bf7677558-jj6jr_83cbe230-dcbc-4c90-befd-35f5082eaba6/operator/0.log"
Sep 29 18:15:32 crc kubenswrapper[4592]: I0929 18:15:32.285908 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-bc9lr_99fc0436-2ce9-4df7-ad2b-4ddb6dff9983/registry-server/0.log"
Sep 29 18:15:32 crc kubenswrapper[4592]: I0929 18:15:32.395283 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-l97v2_bb38bf88-f05d-4e0e-8923-66b2097e247c/kube-rbac-proxy/0.log"
Sep 29 18:15:32 crc kubenswrapper[4592]: I0929 18:15:32.485293 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-9976ff44c-l97v2_bb38bf88-f05d-4e0e-8923-66b2097e247c/manager/0.log"
Sep 29 18:15:32 crc kubenswrapper[4592]: I0929 18:15:32.592399 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-j5st9_1100f7ed-81d3-49d8-9852-867de93e273b/kube-rbac-proxy/0.log"
Sep 29 18:15:32 crc kubenswrapper[4592]: I0929 18:15:32.643442 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-589c58c6c-j5st9_1100f7ed-81d3-49d8-9852-867de93e273b/manager/0.log"
Sep 29 18:15:32 crc kubenswrapper[4592]: I0929 18:15:32.749302 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7b9c4c58f5-fdd5n_8da8ce1f-60e7-4381-975e-daf9c5225b10/manager/0.log"
Sep 29 18:15:32 crc kubenswrapper[4592]: I0929 18:15:32.798730 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-79d8469568-2pcjb_a3543654-318a-48ed-8109-a76d758b231d/operator/0.log"
Sep 29 18:15:32 crc kubenswrapper[4592]: I0929 18:15:32.866588 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-pfzkm_9974276a-24ba-4ca1-9c70-f85e17e9c10c/kube-rbac-proxy/0.log"
Sep 29 18:15:32 crc kubenswrapper[4592]: I0929 18:15:32.960495 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-bc7dc7bd9-pfzkm_9974276a-24ba-4ca1-9c70-f85e17e9c10c/manager/0.log"
Sep 29 18:15:33 crc kubenswrapper[4592]: I0929 18:15:33.015268 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-77pht_5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4/kube-rbac-proxy/0.log"
Sep 29 18:15:33 crc kubenswrapper[4592]: I0929 18:15:33.131247 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-b8d54b5d7-77pht_5b1e94d9-e8fc-4c6a-b91f-709e5a5321a4/manager/0.log"
Sep 29 18:15:33 crc kubenswrapper[4592]: I0929 18:15:33.171490 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-mxl29_33a1eea4-82d2-438a-a844-6539c3016172/kube-rbac-proxy/0.log"
Sep 29 18:15:33 crc kubenswrapper[4592]: I0929 18:15:33.242539 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-f66b554c6-mxl29_33a1eea4-82d2-438a-a844-6539c3016172/manager/0.log"
Sep 29 18:15:33 crc kubenswrapper[4592]: I0929 18:15:33.376502 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-mqwzl_50b8b2f8-551f-4379-84b6-5b217fa8b50c/kube-rbac-proxy/0.log"
Sep 29 18:15:33 crc kubenswrapper[4592]: I0929 18:15:33.421175 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-76669f99c-mqwzl_50b8b2f8-551f-4379-84b6-5b217fa8b50c/manager/0.log"
Sep 29 18:15:51 crc kubenswrapper[4592]: I0929 18:15:51.700361 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-m4z52_ff85be0b-4fe9-43fa-941f-c00f69b7f459/control-plane-machine-set-operator/0.log"
Sep 29 18:15:51 crc kubenswrapper[4592]: I0929 18:15:51.792226 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-smrv8_92eb6f89-2332-47d7-a04c-19e63442c882/kube-rbac-proxy/0.log"
Sep 29 18:15:51 crc kubenswrapper[4592]: I0929 18:15:51.847263 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-smrv8_92eb6f89-2332-47d7-a04c-19e63442c882/machine-api-operator/0.log"
Sep 29 18:16:00 crc kubenswrapper[4592]: I0929 18:16:00.883115 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Sep 29 18:16:00 crc kubenswrapper[4592]: I0929 18:16:00.883928 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Sep 29 18:16:00 crc kubenswrapper[4592]: I0929 18:16:00.884011 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg"
Sep 29 18:16:00 crc kubenswrapper[4592]: I0929 18:16:00.885363 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fb692c262e198dd5e7af83b00f4158ad42d9ceb33907aecb9725919fbdbcfca9"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Sep 29 18:16:00 crc kubenswrapper[4592]: I0929 18:16:00.885499 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" containerID="cri-o://fb692c262e198dd5e7af83b00f4158ad42d9ceb33907aecb9725919fbdbcfca9" gracePeriod=600
Sep 29 18:16:01 crc kubenswrapper[4592]: I0929 18:16:01.121777 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="fb692c262e198dd5e7af83b00f4158ad42d9ceb33907aecb9725919fbdbcfca9" exitCode=0
Sep 29 18:16:01 crc kubenswrapper[4592]: I0929 18:16:01.121847 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"fb692c262e198dd5e7af83b00f4158ad42d9ceb33907aecb9725919fbdbcfca9"}
Sep 29 18:16:01 crc kubenswrapper[4592]: I0929 18:16:01.121890 4592 scope.go:117] "RemoveContainer" containerID="404beb8415f13922250f052fa3d48dfde6759ba399c61122c824d7de13df6362"
Sep 29 18:16:02 crc kubenswrapper[4592]: I0929 18:16:02.135274 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerStarted","Data":"a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2"}
Sep 29 18:16:04 crc kubenswrapper[4592]: I0929 18:16:04.715899 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-wnhfl_0080b650-be9f-452d-8c10-69ae3480edf2/cert-manager-controller/0.log"
Sep 29 18:16:04 crc kubenswrapper[4592]: I0929 18:16:04.948813 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-vdxkr_eb6c1a73-f740-4b79-ab2a-ccf80a36deb5/cert-manager-webhook/0.log"
Sep 29 18:16:04 crc kubenswrapper[4592]: I0929 18:16:04.981874 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-6phr8_26c87439-d01b-405b-9567-f2c2c83283e1/cert-manager-cainjector/0.log"
Sep 29 18:16:18 crc kubenswrapper[4592]: I0929 18:16:18.522883 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-864bb6dfb5-94s8k_f5019cbd-3156-4d20-9c40-163965b4ca0b/nmstate-console-plugin/0.log"
Sep 29 18:16:18 crc kubenswrapper[4592]: I0929 18:16:18.695209 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-zgcpn_252fdf6a-56d5-473a-b492-e9b94bc89d19/nmstate-handler/0.log"
Sep 29 18:16:18 crc kubenswrapper[4592]: I0929 18:16:18.751325 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-5lhdp_15e33da6-1266-4757-ab8b-bcbd435b8d26/kube-rbac-proxy/0.log"
Sep 29 18:16:18 crc kubenswrapper[4592]: I0929 18:16:18.815236 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-58fcddf996-5lhdp_15e33da6-1266-4757-ab8b-bcbd435b8d26/nmstate-metrics/0.log"
Sep 29 18:16:19 crc kubenswrapper[4592]: I0929 18:16:19.007694 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5d6f6cfd66-x6d4v_e9ff4f23-0699-427f-86b3-275b408c261a/nmstate-operator/0.log"
Sep 29 18:16:19 crc kubenswrapper[4592]: I0929 18:16:19.010205 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6d689559c5-cfxmq_c9a4f2e3-e2ba-460b-92b2-a7cfda566c50/nmstate-webhook/0.log"
Sep 29 18:16:33 crc kubenswrapper[4592]: I0929 18:16:33.762328 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-jzgm6_560cbf05-ef23-4767-bd5c-eabd7f3eb864/kube-rbac-proxy/0.log"
Sep 29 18:16:33 crc kubenswrapper[4592]: I0929 18:16:33.968205 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-jzgm6_560cbf05-ef23-4767-bd5c-eabd7f3eb864/controller/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.156826 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-frr-files/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.237671 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-frr-files/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.248564 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-reloader/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.266701 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-metrics/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.373817 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-reloader/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.524796 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-frr-files/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.541554 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-reloader/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.597609 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-metrics/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.640931 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-metrics/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.823787 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-metrics/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.831382 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-frr-files/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.841907 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/cp-reloader/0.log"
Sep 29 18:16:34 crc kubenswrapper[4592]: I0929 18:16:34.854602 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/controller/0.log"
Sep 29 18:16:35 crc kubenswrapper[4592]: I0929 18:16:35.046273 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/kube-rbac-proxy/0.log"
Sep 29 18:16:35 crc kubenswrapper[4592]: I0929 18:16:35.061614 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/frr-metrics/0.log"
Sep 29 18:16:35 crc kubenswrapper[4592]: I0929 18:16:35.069893 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/kube-rbac-proxy-frr/0.log"
Sep 29 18:16:35 crc kubenswrapper[4592]: I0929 18:16:35.308308 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/reloader/0.log"
Sep 29 18:16:35 crc kubenswrapper[4592]: I0929 18:16:35.341722 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-lrvw2_2c5260c1-19c0-4d8b-b659-5a09f3a887da/frr-k8s-webhook-server/0.log"
Sep 29 18:16:35 crc kubenswrapper[4592]: I0929 18:16:35.654376 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5b64bfcc84-9fvrf_764035e6-f447-4e19-a17f-c334e6270ba6/manager/0.log"
Sep 29 18:16:36 crc kubenswrapper[4592]: I0929 18:16:36.145897 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5c7645bc9b-vdzjb_7290d1e2-eecb-4663-8c34-66c35acc0726/webhook-server/0.log"
Sep 29 18:16:36 crc kubenswrapper[4592]: I0929 18:16:36.201801 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-drlk6_8fa3e452-ce03-4b76-812f-cc9ff86f9b10/frr/0.log"
Sep 29 18:16:36 crc kubenswrapper[4592]: I0929 18:16:36.737145 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-z794s_8634039b-db7c-46c2-a140-a746270aa768/kube-rbac-proxy/0.log"
Sep 29 18:16:37 crc kubenswrapper[4592]: I0929 18:16:37.102777 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-z794s_8634039b-db7c-46c2-a140-a746270aa768/speaker/0.log"
Sep 29 18:16:50 crc kubenswrapper[4592]: I0929 18:16:50.752982 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/util/0.log"
Sep 29 18:16:50 crc kubenswrapper[4592]: I0929 18:16:50.952895 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/util/0.log"
Sep 29 18:16:50 crc kubenswrapper[4592]: I0929 18:16:50.953782 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/pull/0.log"
Sep 29 18:16:51 crc kubenswrapper[4592]: I0929 18:16:51.014958 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/pull/0.log"
Sep 29 18:16:51 crc kubenswrapper[4592]: I0929 18:16:51.181367 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/util/0.log"
Sep 29 18:16:51 crc kubenswrapper[4592]: I0929 18:16:51.232839 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/extract/0.log"
Sep 29 18:16:51 crc kubenswrapper[4592]: I0929 18:16:51.244956 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_9a6e092ce660f08e14c0b0ceab3711fa43f2b70244f9df8a7a069040bcfh6xb_9368ddaf-962a-4262-91ad-5febcadc8dbf/pull/0.log"
Sep 29 18:16:51 crc kubenswrapper[4592]: I0929 18:16:51.388702 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/extract-utilities/0.log"
Sep 29 18:16:51 crc kubenswrapper[4592]: I0929 18:16:51.530402 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/extract-content/0.log"
Sep 29 18:16:51 crc kubenswrapper[4592]: I0929 18:16:51.542808 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/extract-utilities/0.log"
Sep 29 18:16:51 crc kubenswrapper[4592]: I0929 18:16:51.611341 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/extract-content/0.log"
Sep 29 18:16:51 crc kubenswrapper[4592]: I0929 18:16:51.727974 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/extract-content/0.log"
Sep 29 18:16:51 crc kubenswrapper[4592]: I0929 18:16:51.800815 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/extract-utilities/0.log"
Sep 29 18:16:52 crc kubenswrapper[4592]: I0929 18:16:52.002681 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/extract-utilities/0.log"
Sep 29 18:16:52 crc kubenswrapper[4592]: I0929 18:16:52.197751 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/extract-utilities/0.log"
Sep 29 18:16:52 crc kubenswrapper[4592]: I0929 18:16:52.283076 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-ztq52_b766f454-e3ec-4c1c-b730-a4b9a4c47068/registry-server/0.log"
Sep 29 18:16:52 crc kubenswrapper[4592]: I0929 18:16:52.330008 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/extract-content/0.log"
Sep 29 18:16:52 crc kubenswrapper[4592]: I0929 18:16:52.350488 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/extract-content/0.log"
Sep 29 18:16:52 crc kubenswrapper[4592]: I0929 18:16:52.494340 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/extract-utilities/0.log"
Sep 29 18:16:52 crc kubenswrapper[4592]: I0929 18:16:52.505550 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/extract-content/0.log"
Sep 29 18:16:52 crc kubenswrapper[4592]: I0929 18:16:52.781204 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/util/0.log"
Sep 29 18:16:53 crc kubenswrapper[4592]: I0929 18:16:53.074435 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/util/0.log"
Sep 29 18:16:53 crc kubenswrapper[4592]: I0929 18:16:53.120731 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-b5x2p_f1f1db21-cfd8-4071-8923-9a7b08eeb035/registry-server/0.log"
Sep 29 18:16:53 crc kubenswrapper[4592]: I0929 18:16:53.127996 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/pull/0.log"
Sep 29 18:16:53 crc kubenswrapper[4592]: I0929 18:16:53.169521 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/pull/0.log"
Sep 29 18:16:53 crc kubenswrapper[4592]: I0929 18:16:53.350237 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/pull/0.log"
Sep 29 18:16:53 crc kubenswrapper[4592]: I0929 18:16:53.372424 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/extract/0.log"
Sep 29 18:16:53 crc kubenswrapper[4592]: I0929 18:16:53.410209 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96rljb9_f3c7e075-9f35-4418-b416-d5839c9d6b88/util/0.log"
Sep 29 18:16:53 crc kubenswrapper[4592]: I0929 18:16:53.593432 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-hq85k_d9110599-7f42-4970-93fa-89f37c84fad3/marketplace-operator/0.log"
Sep 29 18:16:53 crc kubenswrapper[4592]: I0929 18:16:53.722959 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/extract-utilities/0.log"
Sep 29 18:16:53 crc kubenswrapper[4592]: I0929 18:16:53.786929 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/extract-content/0.log"
Sep 29 18:16:53 crc kubenswrapper[4592]: I0929 18:16:53.840548 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/extract-utilities/0.log"
Sep 29 18:16:53 crc kubenswrapper[4592]: I0929 18:16:53.846454 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/extract-content/0.log"
Sep 29 18:16:54 crc kubenswrapper[4592]: I0929 18:16:54.052346 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/extract-content/0.log"
Sep 29 18:16:54 crc kubenswrapper[4592]: I0929 18:16:54.073977 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/extract-utilities/0.log"
Sep 29 18:16:54 crc kubenswrapper[4592]: I0929 18:16:54.114888 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/extract-utilities/0.log"
Sep 29 18:16:54 crc kubenswrapper[4592]: I0929 18:16:54.265414 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-d2j4d_baf334fc-312d-4264-a6e0-a4c2569421d2/registry-server/0.log"
Sep 29 18:16:54 crc kubenswrapper[4592]: I0929 18:16:54.365888 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/extract-content/0.log"
Sep 29 18:16:54 crc kubenswrapper[4592]: I0929 18:16:54.367831 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/extract-utilities/0.log"
Sep 29 18:16:54 crc kubenswrapper[4592]: I0929 18:16:54.391892 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/extract-content/0.log"
Sep 29 18:16:54 crc kubenswrapper[4592]: I0929 18:16:54.527882 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/extract-utilities/0.log"
Sep 29 18:16:54 crc kubenswrapper[4592]: I0929 18:16:54.533831 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/extract-content/0.log"
Sep 29 18:16:55 crc kubenswrapper[4592]: I0929 18:16:55.048747 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-29m4q_204d33ca-6209-484f-b882-14d0c4270129/registry-server/0.log"
Sep 29 18:18:14 crc kubenswrapper[4592]: I0929 18:18:14.925922 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-twdrl"]
Sep 29 18:18:14 crc kubenswrapper[4592]: E0929 18:18:14.927044 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe8a593-0a55-47b6-a4ec-06817e1babcc" containerName="container-00"
Sep 29 18:18:14 crc kubenswrapper[4592]: I0929 18:18:14.927065 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe8a593-0a55-47b6-a4ec-06817e1babcc" containerName="container-00"
Sep 29 18:18:14 crc kubenswrapper[4592]: I0929 18:18:14.927471 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe8a593-0a55-47b6-a4ec-06817e1babcc" containerName="container-00"
Sep 29 18:18:14 crc kubenswrapper[4592]: I0929 18:18:14.929779 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:15 crc kubenswrapper[4592]: I0929 18:18:15.008406 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-twdrl"]
Sep 29 18:18:15 crc kubenswrapper[4592]: I0929 18:18:15.071779 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e62bcdd-0b81-4b54-a671-1c350c072277-utilities\") pod \"community-operators-twdrl\" (UID: \"8e62bcdd-0b81-4b54-a671-1c350c072277\") " pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:15 crc kubenswrapper[4592]: I0929 18:18:15.071840 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6tdz\" (UniqueName: \"kubernetes.io/projected/8e62bcdd-0b81-4b54-a671-1c350c072277-kube-api-access-d6tdz\") pod \"community-operators-twdrl\" (UID: \"8e62bcdd-0b81-4b54-a671-1c350c072277\") " pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:15 crc kubenswrapper[4592]: I0929 18:18:15.072081 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e62bcdd-0b81-4b54-a671-1c350c072277-catalog-content\") pod \"community-operators-twdrl\" (UID: \"8e62bcdd-0b81-4b54-a671-1c350c072277\") " pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:15 crc kubenswrapper[4592]: I0929 18:18:15.173997 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e62bcdd-0b81-4b54-a671-1c350c072277-utilities\") pod \"community-operators-twdrl\" (UID: \"8e62bcdd-0b81-4b54-a671-1c350c072277\") " pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:15 crc kubenswrapper[4592]: I0929 18:18:15.174061 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6tdz\" (UniqueName: \"kubernetes.io/projected/8e62bcdd-0b81-4b54-a671-1c350c072277-kube-api-access-d6tdz\") pod \"community-operators-twdrl\" (UID: \"8e62bcdd-0b81-4b54-a671-1c350c072277\") " pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:15 crc kubenswrapper[4592]: I0929 18:18:15.174104 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e62bcdd-0b81-4b54-a671-1c350c072277-catalog-content\") pod \"community-operators-twdrl\" (UID: \"8e62bcdd-0b81-4b54-a671-1c350c072277\") " pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:15 crc kubenswrapper[4592]: I0929 18:18:15.174655 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e62bcdd-0b81-4b54-a671-1c350c072277-utilities\") pod \"community-operators-twdrl\" (UID: \"8e62bcdd-0b81-4b54-a671-1c350c072277\") " pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:15 crc kubenswrapper[4592]: I0929 18:18:15.174929 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e62bcdd-0b81-4b54-a671-1c350c072277-catalog-content\") pod \"community-operators-twdrl\" (UID: \"8e62bcdd-0b81-4b54-a671-1c350c072277\") " pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:15 crc kubenswrapper[4592]: I0929 18:18:15.218077 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6tdz\" (UniqueName: \"kubernetes.io/projected/8e62bcdd-0b81-4b54-a671-1c350c072277-kube-api-access-d6tdz\") pod \"community-operators-twdrl\" (UID: \"8e62bcdd-0b81-4b54-a671-1c350c072277\") " pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:15 crc kubenswrapper[4592]: I0929 18:18:15.254515 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:15 crc kubenswrapper[4592]: I0929 18:18:15.819258 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-twdrl"]
Sep 29 18:18:16 crc kubenswrapper[4592]: I0929 18:18:16.495515 4592 generic.go:334] "Generic (PLEG): container finished" podID="8e62bcdd-0b81-4b54-a671-1c350c072277" containerID="527c268087466f74b0ff72f46580c547b427fa567c8e9d1228e6feb34ea199f8" exitCode=0
Sep 29 18:18:16 crc kubenswrapper[4592]: I0929 18:18:16.495889 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-twdrl" event={"ID":"8e62bcdd-0b81-4b54-a671-1c350c072277","Type":"ContainerDied","Data":"527c268087466f74b0ff72f46580c547b427fa567c8e9d1228e6feb34ea199f8"}
Sep 29 18:18:16 crc kubenswrapper[4592]: I0929 18:18:16.495940 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-twdrl" event={"ID":"8e62bcdd-0b81-4b54-a671-1c350c072277","Type":"ContainerStarted","Data":"40f96a53d41ea89afa46f496a18188f815c66394945e52547bdcdef0dd9fdd82"}
Sep 29 18:18:16 crc kubenswrapper[4592]: I0929 18:18:16.498269 4592 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Sep 29 18:18:18 crc kubenswrapper[4592]: I0929 18:18:18.524554 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-twdrl" event={"ID":"8e62bcdd-0b81-4b54-a671-1c350c072277","Type":"ContainerStarted","Data":"1c76596dd6e6a0413076d3cd46585e625c817b1127bd0add5016c118e2bbef79"}
Sep 29 18:18:19 crc kubenswrapper[4592]: I0929 18:18:19.537407 4592 generic.go:334] "Generic (PLEG): container finished" podID="8e62bcdd-0b81-4b54-a671-1c350c072277" containerID="1c76596dd6e6a0413076d3cd46585e625c817b1127bd0add5016c118e2bbef79" exitCode=0
Sep 29 18:18:19 crc kubenswrapper[4592]: I0929 18:18:19.537474 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-twdrl" event={"ID":"8e62bcdd-0b81-4b54-a671-1c350c072277","Type":"ContainerDied","Data":"1c76596dd6e6a0413076d3cd46585e625c817b1127bd0add5016c118e2bbef79"}
Sep 29 18:18:20 crc kubenswrapper[4592]: I0929 18:18:20.552355 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-twdrl" event={"ID":"8e62bcdd-0b81-4b54-a671-1c350c072277","Type":"ContainerStarted","Data":"b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4"}
Sep 29 18:18:20 crc kubenswrapper[4592]: I0929 18:18:20.570905 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-twdrl" podStartSLOduration=2.846902882 podStartE2EDuration="6.570888652s" podCreationTimestamp="2025-09-29 18:18:14 +0000 UTC" firstStartedPulling="2025-09-29 18:18:16.497466721 +0000 UTC m=+5226.645244442" lastFinishedPulling="2025-09-29 18:18:20.221452521 +0000 UTC m=+5230.369230212" observedRunningTime="2025-09-29 18:18:20.569896344 +0000 UTC m=+5230.717674045" watchObservedRunningTime="2025-09-29 18:18:20.570888652 +0000 UTC m=+5230.718666333"
Sep 29 18:18:25 crc kubenswrapper[4592]: I0929 18:18:25.255841 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:25 crc kubenswrapper[4592]: I0929 18:18:25.256459 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:25 crc kubenswrapper[4592]: I0929 18:18:25.318290 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:25 crc kubenswrapper[4592]: I0929 18:18:25.664055 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:25 crc kubenswrapper[4592]: I0929 18:18:25.707337 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-twdrl"]
Sep 29 18:18:27 crc kubenswrapper[4592]: I0929 18:18:27.632817 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-twdrl" podUID="8e62bcdd-0b81-4b54-a671-1c350c072277" containerName="registry-server" containerID="cri-o://b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4" gracePeriod=2
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.162693 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.242539 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e62bcdd-0b81-4b54-a671-1c350c072277-catalog-content\") pod \"8e62bcdd-0b81-4b54-a671-1c350c072277\" (UID: \"8e62bcdd-0b81-4b54-a671-1c350c072277\") "
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.242853 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6tdz\" (UniqueName: \"kubernetes.io/projected/8e62bcdd-0b81-4b54-a671-1c350c072277-kube-api-access-d6tdz\") pod \"8e62bcdd-0b81-4b54-a671-1c350c072277\" (UID: \"8e62bcdd-0b81-4b54-a671-1c350c072277\") "
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.242883 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e62bcdd-0b81-4b54-a671-1c350c072277-utilities\") pod \"8e62bcdd-0b81-4b54-a671-1c350c072277\" (UID: \"8e62bcdd-0b81-4b54-a671-1c350c072277\") "
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.243832 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e62bcdd-0b81-4b54-a671-1c350c072277-utilities" (OuterVolumeSpecName: "utilities") pod "8e62bcdd-0b81-4b54-a671-1c350c072277" (UID: "8e62bcdd-0b81-4b54-a671-1c350c072277"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.248929 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e62bcdd-0b81-4b54-a671-1c350c072277-kube-api-access-d6tdz" (OuterVolumeSpecName: "kube-api-access-d6tdz") pod "8e62bcdd-0b81-4b54-a671-1c350c072277" (UID: "8e62bcdd-0b81-4b54-a671-1c350c072277"). InnerVolumeSpecName "kube-api-access-d6tdz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.306507 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e62bcdd-0b81-4b54-a671-1c350c072277-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8e62bcdd-0b81-4b54-a671-1c350c072277" (UID: "8e62bcdd-0b81-4b54-a671-1c350c072277"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.344535 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e62bcdd-0b81-4b54-a671-1c350c072277-catalog-content\") on node \"crc\" DevicePath \"\""
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.344569 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6tdz\" (UniqueName: \"kubernetes.io/projected/8e62bcdd-0b81-4b54-a671-1c350c072277-kube-api-access-d6tdz\") on node \"crc\" DevicePath \"\""
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.344579 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e62bcdd-0b81-4b54-a671-1c350c072277-utilities\") on node \"crc\" DevicePath \"\""
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.644991 4592 generic.go:334] "Generic (PLEG): container finished" podID="8e62bcdd-0b81-4b54-a671-1c350c072277" containerID="b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4" exitCode=0
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.645053 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-twdrl" event={"ID":"8e62bcdd-0b81-4b54-a671-1c350c072277","Type":"ContainerDied","Data":"b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4"}
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.645103 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-twdrl" event={"ID":"8e62bcdd-0b81-4b54-a671-1c350c072277","Type":"ContainerDied","Data":"40f96a53d41ea89afa46f496a18188f815c66394945e52547bdcdef0dd9fdd82"}
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.645132 4592 scope.go:117] "RemoveContainer" containerID="b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4"
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.646382 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-twdrl"
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.674737 4592 scope.go:117] "RemoveContainer" containerID="1c76596dd6e6a0413076d3cd46585e625c817b1127bd0add5016c118e2bbef79"
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.714503 4592 scope.go:117] "RemoveContainer" containerID="527c268087466f74b0ff72f46580c547b427fa567c8e9d1228e6feb34ea199f8"
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.717350 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-twdrl"]
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.727351 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-twdrl"]
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.777049 4592 scope.go:117] "RemoveContainer" containerID="b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4"
Sep 29 18:18:28 crc kubenswrapper[4592]: E0929 18:18:28.777543 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4\": container with ID starting with b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4 not found: ID does not exist" containerID="b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4"
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.777603 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4"} err="failed to get container status \"b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4\": rpc error: code = NotFound desc = could not find container \"b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4\": container with ID starting with b8f0266ac94c37d47723ff74dc3b8da20c50e95d3c4120bc4bef3a9679c910f4 not found: ID does not exist"
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.777631 4592 scope.go:117] "RemoveContainer" containerID="1c76596dd6e6a0413076d3cd46585e625c817b1127bd0add5016c118e2bbef79"
Sep 29 18:18:28 crc kubenswrapper[4592]: E0929 18:18:28.779168 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c76596dd6e6a0413076d3cd46585e625c817b1127bd0add5016c118e2bbef79\": container with ID starting with 1c76596dd6e6a0413076d3cd46585e625c817b1127bd0add5016c118e2bbef79 not found: ID does not exist" containerID="1c76596dd6e6a0413076d3cd46585e625c817b1127bd0add5016c118e2bbef79"
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.779200 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c76596dd6e6a0413076d3cd46585e625c817b1127bd0add5016c118e2bbef79"} err="failed to get container status \"1c76596dd6e6a0413076d3cd46585e625c817b1127bd0add5016c118e2bbef79\": rpc error: code = NotFound desc = could not find container \"1c76596dd6e6a0413076d3cd46585e625c817b1127bd0add5016c118e2bbef79\": container with ID starting with 1c76596dd6e6a0413076d3cd46585e625c817b1127bd0add5016c118e2bbef79 not found: ID does not exist"
Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.779218 4592 scope.go:117] "RemoveContainer" containerID="527c268087466f74b0ff72f46580c547b427fa567c8e9d1228e6feb34ea199f8"
Sep 29 18:18:28 crc kubenswrapper[4592]: E0929 18:18:28.779450 4592 log.go:32] "ContainerStatus from runtime service
failed" err="rpc error: code = NotFound desc = could not find container \"527c268087466f74b0ff72f46580c547b427fa567c8e9d1228e6feb34ea199f8\": container with ID starting with 527c268087466f74b0ff72f46580c547b427fa567c8e9d1228e6feb34ea199f8 not found: ID does not exist" containerID="527c268087466f74b0ff72f46580c547b427fa567c8e9d1228e6feb34ea199f8" Sep 29 18:18:28 crc kubenswrapper[4592]: I0929 18:18:28.779486 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"527c268087466f74b0ff72f46580c547b427fa567c8e9d1228e6feb34ea199f8"} err="failed to get container status \"527c268087466f74b0ff72f46580c547b427fa567c8e9d1228e6feb34ea199f8\": rpc error: code = NotFound desc = could not find container \"527c268087466f74b0ff72f46580c547b427fa567c8e9d1228e6feb34ea199f8\": container with ID starting with 527c268087466f74b0ff72f46580c547b427fa567c8e9d1228e6feb34ea199f8 not found: ID does not exist" Sep 29 18:18:29 crc kubenswrapper[4592]: I0929 18:18:29.196297 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e62bcdd-0b81-4b54-a671-1c350c072277" path="/var/lib/kubelet/pods/8e62bcdd-0b81-4b54-a671-1c350c072277/volumes" Sep 29 18:18:30 crc kubenswrapper[4592]: I0929 18:18:30.883333 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 18:18:30 crc kubenswrapper[4592]: I0929 18:18:30.883708 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 18:18:38 crc kubenswrapper[4592]: I0929 18:18:38.803519 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6wb2f"] Sep 29 18:18:38 crc kubenswrapper[4592]: E0929 18:18:38.805552 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e62bcdd-0b81-4b54-a671-1c350c072277" containerName="registry-server" Sep 29 18:18:38 crc kubenswrapper[4592]: I0929 18:18:38.805657 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e62bcdd-0b81-4b54-a671-1c350c072277" containerName="registry-server" Sep 29 18:18:38 crc kubenswrapper[4592]: E0929 18:18:38.805748 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e62bcdd-0b81-4b54-a671-1c350c072277" containerName="extract-content" Sep 29 18:18:38 crc kubenswrapper[4592]: I0929 18:18:38.805821 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e62bcdd-0b81-4b54-a671-1c350c072277" containerName="extract-content" Sep 29 18:18:38 crc kubenswrapper[4592]: E0929 18:18:38.805923 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e62bcdd-0b81-4b54-a671-1c350c072277" containerName="extract-utilities" Sep 29 18:18:38 crc kubenswrapper[4592]: I0929 18:18:38.805998 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e62bcdd-0b81-4b54-a671-1c350c072277" containerName="extract-utilities" Sep 29 18:18:38 crc kubenswrapper[4592]: I0929 18:18:38.806311 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e62bcdd-0b81-4b54-a671-1c350c072277" containerName="registry-server" Sep 29 18:18:38 crc kubenswrapper[4592]: I0929 
18:18:38.808052 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:38 crc kubenswrapper[4592]: I0929 18:18:38.820259 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6wb2f"] Sep 29 18:18:38 crc kubenswrapper[4592]: I0929 18:18:38.897903 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63942411-8161-486c-941a-b089b2efcfa0-utilities\") pod \"redhat-operators-6wb2f\" (UID: \"63942411-8161-486c-941a-b089b2efcfa0\") " pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:38 crc kubenswrapper[4592]: I0929 18:18:38.898015 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63942411-8161-486c-941a-b089b2efcfa0-catalog-content\") pod \"redhat-operators-6wb2f\" (UID: \"63942411-8161-486c-941a-b089b2efcfa0\") " pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:38 crc kubenswrapper[4592]: I0929 18:18:38.898116 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75khx\" (UniqueName: \"kubernetes.io/projected/63942411-8161-486c-941a-b089b2efcfa0-kube-api-access-75khx\") pod \"redhat-operators-6wb2f\" (UID: \"63942411-8161-486c-941a-b089b2efcfa0\") " pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:39 crc kubenswrapper[4592]: I0929 18:18:38.999996 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63942411-8161-486c-941a-b089b2efcfa0-utilities\") pod \"redhat-operators-6wb2f\" (UID: \"63942411-8161-486c-941a-b089b2efcfa0\") " pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:39 crc kubenswrapper[4592]: I0929 18:18:39.000071 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63942411-8161-486c-941a-b089b2efcfa0-catalog-content\") pod \"redhat-operators-6wb2f\" (UID: \"63942411-8161-486c-941a-b089b2efcfa0\") " pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:39 crc kubenswrapper[4592]: I0929 18:18:39.000130 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75khx\" (UniqueName: \"kubernetes.io/projected/63942411-8161-486c-941a-b089b2efcfa0-kube-api-access-75khx\") pod \"redhat-operators-6wb2f\" (UID: \"63942411-8161-486c-941a-b089b2efcfa0\") " pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:39 crc kubenswrapper[4592]: I0929 18:18:39.000816 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63942411-8161-486c-941a-b089b2efcfa0-utilities\") pod \"redhat-operators-6wb2f\" (UID: \"63942411-8161-486c-941a-b089b2efcfa0\") " pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:39 crc kubenswrapper[4592]: I0929 18:18:39.000860 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63942411-8161-486c-941a-b089b2efcfa0-catalog-content\") pod \"redhat-operators-6wb2f\" (UID: \"63942411-8161-486c-941a-b089b2efcfa0\") " pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:39 crc kubenswrapper[4592]: I0929 18:18:39.021320 4592 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75khx\" (UniqueName: \"kubernetes.io/projected/63942411-8161-486c-941a-b089b2efcfa0-kube-api-access-75khx\") pod \"redhat-operators-6wb2f\" (UID: \"63942411-8161-486c-941a-b089b2efcfa0\") " pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:39 crc kubenswrapper[4592]: I0929 18:18:39.127930 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:39 crc kubenswrapper[4592]: I0929 18:18:39.614729 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6wb2f"] Sep 29 18:18:39 crc kubenswrapper[4592]: I0929 18:18:39.764645 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wb2f" event={"ID":"63942411-8161-486c-941a-b089b2efcfa0","Type":"ContainerStarted","Data":"b05c9eb1c187715216bcd8e8e9ff334bf65a813222ba9c7e02aebda604e11087"} Sep 29 18:18:40 crc kubenswrapper[4592]: I0929 18:18:40.775636 4592 generic.go:334] "Generic (PLEG): container finished" podID="63942411-8161-486c-941a-b089b2efcfa0" containerID="29114c679f7a3833191ee890957f6e35fd941ea79cb22531a09b10c65d3de4b7" exitCode=0 Sep 29 18:18:40 crc kubenswrapper[4592]: I0929 18:18:40.775735 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wb2f" event={"ID":"63942411-8161-486c-941a-b089b2efcfa0","Type":"ContainerDied","Data":"29114c679f7a3833191ee890957f6e35fd941ea79cb22531a09b10c65d3de4b7"} Sep 29 18:18:42 crc kubenswrapper[4592]: I0929 18:18:42.802660 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wb2f" event={"ID":"63942411-8161-486c-941a-b089b2efcfa0","Type":"ContainerStarted","Data":"919ea7fb3a19ba26bbb1476c9231779a7b7d336c4d8d206fca75f3ef04c37cfb"} Sep 29 18:18:45 crc kubenswrapper[4592]: I0929 18:18:45.832093 4592 generic.go:334] "Generic (PLEG): container finished" podID="63942411-8161-486c-941a-b089b2efcfa0" containerID="919ea7fb3a19ba26bbb1476c9231779a7b7d336c4d8d206fca75f3ef04c37cfb" exitCode=0 Sep 29 18:18:45 crc kubenswrapper[4592]: I0929 18:18:45.832175 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wb2f" event={"ID":"63942411-8161-486c-941a-b089b2efcfa0","Type":"ContainerDied","Data":"919ea7fb3a19ba26bbb1476c9231779a7b7d336c4d8d206fca75f3ef04c37cfb"} Sep 29 18:18:46 crc kubenswrapper[4592]: I0929 18:18:46.846058 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wb2f" event={"ID":"63942411-8161-486c-941a-b089b2efcfa0","Type":"ContainerStarted","Data":"ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea"} Sep 29 18:18:49 crc kubenswrapper[4592]: I0929 18:18:49.129070 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:49 crc kubenswrapper[4592]: I0929 18:18:49.129467 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:50 crc kubenswrapper[4592]: I0929 18:18:50.204783 4592 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6wb2f" podUID="63942411-8161-486c-941a-b089b2efcfa0" containerName="registry-server" probeResult="failure" output=< Sep 29 18:18:50 crc kubenswrapper[4592]: timeout: failed to connect service ":50051" within 1s Sep 29 
18:18:50 crc kubenswrapper[4592]: > Sep 29 18:18:59 crc kubenswrapper[4592]: I0929 18:18:59.194766 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:59 crc kubenswrapper[4592]: I0929 18:18:59.227590 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6wb2f" podStartSLOduration=15.721322382 podStartE2EDuration="21.227561756s" podCreationTimestamp="2025-09-29 18:18:38 +0000 UTC" firstStartedPulling="2025-09-29 18:18:40.778069949 +0000 UTC m=+5250.925847630" lastFinishedPulling="2025-09-29 18:18:46.284309323 +0000 UTC m=+5256.432087004" observedRunningTime="2025-09-29 18:18:46.870562084 +0000 UTC m=+5257.018339765" watchObservedRunningTime="2025-09-29 18:18:59.227561756 +0000 UTC m=+5269.375339477" Sep 29 18:18:59 crc kubenswrapper[4592]: I0929 18:18:59.260989 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:18:59 crc kubenswrapper[4592]: I0929 18:18:59.452249 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6wb2f"] Sep 29 18:19:00 crc kubenswrapper[4592]: I0929 18:19:00.884088 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 18:19:00 crc kubenswrapper[4592]: I0929 18:19:00.884668 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 18:19:00 crc kubenswrapper[4592]: I0929 18:19:00.985945 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6wb2f" podUID="63942411-8161-486c-941a-b089b2efcfa0" containerName="registry-server" containerID="cri-o://ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea" gracePeriod=2 Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.463880 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.582110 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63942411-8161-486c-941a-b089b2efcfa0-catalog-content\") pod \"63942411-8161-486c-941a-b089b2efcfa0\" (UID: \"63942411-8161-486c-941a-b089b2efcfa0\") " Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.582410 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63942411-8161-486c-941a-b089b2efcfa0-utilities\") pod \"63942411-8161-486c-941a-b089b2efcfa0\" (UID: \"63942411-8161-486c-941a-b089b2efcfa0\") " Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.582479 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75khx\" (UniqueName: \"kubernetes.io/projected/63942411-8161-486c-941a-b089b2efcfa0-kube-api-access-75khx\") pod \"63942411-8161-486c-941a-b089b2efcfa0\" (UID: \"63942411-8161-486c-941a-b089b2efcfa0\") " Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.583109 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63942411-8161-486c-941a-b089b2efcfa0-utilities" (OuterVolumeSpecName: "utilities") pod "63942411-8161-486c-941a-b089b2efcfa0" (UID: "63942411-8161-486c-941a-b089b2efcfa0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.587957 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63942411-8161-486c-941a-b089b2efcfa0-kube-api-access-75khx" (OuterVolumeSpecName: "kube-api-access-75khx") pod "63942411-8161-486c-941a-b089b2efcfa0" (UID: "63942411-8161-486c-941a-b089b2efcfa0"). InnerVolumeSpecName "kube-api-access-75khx". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.662827 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63942411-8161-486c-941a-b089b2efcfa0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "63942411-8161-486c-941a-b089b2efcfa0" (UID: "63942411-8161-486c-941a-b089b2efcfa0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.684900 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/63942411-8161-486c-941a-b089b2efcfa0-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.684941 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/63942411-8161-486c-941a-b089b2efcfa0-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.684953 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75khx\" (UniqueName: \"kubernetes.io/projected/63942411-8161-486c-941a-b089b2efcfa0-kube-api-access-75khx\") on node \"crc\" DevicePath \"\"" Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.995931 4592 generic.go:334] "Generic (PLEG): container finished" podID="63942411-8161-486c-941a-b089b2efcfa0" containerID="ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea" exitCode=0 Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.995971 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6wb2f" Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.995991 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wb2f" event={"ID":"63942411-8161-486c-941a-b089b2efcfa0","Type":"ContainerDied","Data":"ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea"} Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.996028 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wb2f" event={"ID":"63942411-8161-486c-941a-b089b2efcfa0","Type":"ContainerDied","Data":"b05c9eb1c187715216bcd8e8e9ff334bf65a813222ba9c7e02aebda604e11087"} Sep 29 18:19:01 crc kubenswrapper[4592]: I0929 18:19:01.996046 4592 scope.go:117] "RemoveContainer" containerID="ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea" Sep 29 18:19:02 crc kubenswrapper[4592]: I0929 18:19:02.017364 4592 scope.go:117] "RemoveContainer" containerID="919ea7fb3a19ba26bbb1476c9231779a7b7d336c4d8d206fca75f3ef04c37cfb" Sep 29 18:19:02 crc kubenswrapper[4592]: I0929 18:19:02.037766 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6wb2f"] Sep 29 18:19:02 crc kubenswrapper[4592]: I0929 18:19:02.047482 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6wb2f"] Sep 29 18:19:02 crc kubenswrapper[4592]: I0929 18:19:02.053935 4592 scope.go:117] "RemoveContainer" containerID="29114c679f7a3833191ee890957f6e35fd941ea79cb22531a09b10c65d3de4b7" Sep 29 18:19:02 crc kubenswrapper[4592]: I0929 18:19:02.097335 4592 scope.go:117] "RemoveContainer" containerID="ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea" Sep 29 18:19:02 crc kubenswrapper[4592]: E0929 18:19:02.097767 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea\": container with ID starting with ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea not found: ID does not exist" containerID="ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea" Sep 29 18:19:02 crc kubenswrapper[4592]: I0929 18:19:02.097813 4592 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea"} err="failed to get container status \"ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea\": rpc error: code = NotFound desc = could not find container \"ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea\": container with ID starting with ddee0c1fb65a290e110144127fcfa18aed0ee79590d517163e009204ea3539ea not found: ID does not exist" Sep 29 18:19:02 crc kubenswrapper[4592]: I0929 18:19:02.097841 4592 scope.go:117] "RemoveContainer" containerID="919ea7fb3a19ba26bbb1476c9231779a7b7d336c4d8d206fca75f3ef04c37cfb" Sep 29 18:19:02 crc kubenswrapper[4592]: E0929 18:19:02.098130 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"919ea7fb3a19ba26bbb1476c9231779a7b7d336c4d8d206fca75f3ef04c37cfb\": container with ID starting with 919ea7fb3a19ba26bbb1476c9231779a7b7d336c4d8d206fca75f3ef04c37cfb not found: ID does not exist" containerID="919ea7fb3a19ba26bbb1476c9231779a7b7d336c4d8d206fca75f3ef04c37cfb" Sep 29 18:19:02 crc kubenswrapper[4592]: I0929 18:19:02.098227 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"919ea7fb3a19ba26bbb1476c9231779a7b7d336c4d8d206fca75f3ef04c37cfb"} err="failed to get container status \"919ea7fb3a19ba26bbb1476c9231779a7b7d336c4d8d206fca75f3ef04c37cfb\": rpc error: code = NotFound desc = could not find container \"919ea7fb3a19ba26bbb1476c9231779a7b7d336c4d8d206fca75f3ef04c37cfb\": container with ID starting with 919ea7fb3a19ba26bbb1476c9231779a7b7d336c4d8d206fca75f3ef04c37cfb not found: ID does not exist" Sep 29 18:19:02 crc kubenswrapper[4592]: I0929 18:19:02.098246 4592 scope.go:117] "RemoveContainer" containerID="29114c679f7a3833191ee890957f6e35fd941ea79cb22531a09b10c65d3de4b7" Sep 29 18:19:02 crc kubenswrapper[4592]: E0929 18:19:02.098472 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29114c679f7a3833191ee890957f6e35fd941ea79cb22531a09b10c65d3de4b7\": container with ID starting with 29114c679f7a3833191ee890957f6e35fd941ea79cb22531a09b10c65d3de4b7 not found: ID does not exist" containerID="29114c679f7a3833191ee890957f6e35fd941ea79cb22531a09b10c65d3de4b7" Sep 29 18:19:02 crc kubenswrapper[4592]: I0929 18:19:02.098501 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29114c679f7a3833191ee890957f6e35fd941ea79cb22531a09b10c65d3de4b7"} err="failed to get container status \"29114c679f7a3833191ee890957f6e35fd941ea79cb22531a09b10c65d3de4b7\": rpc error: code = NotFound desc = could not find container \"29114c679f7a3833191ee890957f6e35fd941ea79cb22531a09b10c65d3de4b7\": container with ID starting with 29114c679f7a3833191ee890957f6e35fd941ea79cb22531a09b10c65d3de4b7 not found: ID does not exist" Sep 29 18:19:03 crc kubenswrapper[4592]: I0929 18:19:03.200826 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63942411-8161-486c-941a-b089b2efcfa0" path="/var/lib/kubelet/pods/63942411-8161-486c-941a-b089b2efcfa0/volumes" Sep 29 18:19:23 crc kubenswrapper[4592]: I0929 18:19:23.777350 4592 scope.go:117] "RemoveContainer" containerID="c3de6b425a59d7ff3cfea5d526feddd9c2fc13f1f492251c1f2064e1568dd464" Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.766590 4592 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-5x7kt"] Sep 29 18:19:30 crc kubenswrapper[4592]: E0929 18:19:30.767451 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63942411-8161-486c-941a-b089b2efcfa0" containerName="extract-content" Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.767463 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="63942411-8161-486c-941a-b089b2efcfa0" containerName="extract-content" Sep 29 18:19:30 crc kubenswrapper[4592]: E0929 18:19:30.767478 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63942411-8161-486c-941a-b089b2efcfa0" containerName="registry-server" Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.767484 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="63942411-8161-486c-941a-b089b2efcfa0" containerName="registry-server" Sep 29 18:19:30 crc kubenswrapper[4592]: E0929 18:19:30.767509 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63942411-8161-486c-941a-b089b2efcfa0" containerName="extract-utilities" Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.767515 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="63942411-8161-486c-941a-b089b2efcfa0" containerName="extract-utilities" Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.777755 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="63942411-8161-486c-941a-b089b2efcfa0" containerName="registry-server" Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.780367 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.831163 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5x7kt"] Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.884238 4592 patch_prober.go:28] interesting pod/machine-config-daemon-dfqzg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.884294 4592 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.884341 4592 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.885046 4592 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2"} pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.885092 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" containerName="machine-config-daemon" 
containerID="cri-o://a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" gracePeriod=600 Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.914548 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1813154f-5125-48d1-a4f4-ab1b27bee5bf-utilities\") pod \"certified-operators-5x7kt\" (UID: \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\") " pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.915860 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1813154f-5125-48d1-a4f4-ab1b27bee5bf-catalog-content\") pod \"certified-operators-5x7kt\" (UID: \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\") " pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:30 crc kubenswrapper[4592]: I0929 18:19:30.916262 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xbgn\" (UniqueName: \"kubernetes.io/projected/1813154f-5125-48d1-a4f4-ab1b27bee5bf-kube-api-access-5xbgn\") pod \"certified-operators-5x7kt\" (UID: \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\") " pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:31 crc kubenswrapper[4592]: I0929 18:19:31.019239 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xbgn\" (UniqueName: \"kubernetes.io/projected/1813154f-5125-48d1-a4f4-ab1b27bee5bf-kube-api-access-5xbgn\") pod \"certified-operators-5x7kt\" (UID: \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\") " pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:31 crc kubenswrapper[4592]: I0929 18:19:31.019659 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1813154f-5125-48d1-a4f4-ab1b27bee5bf-utilities\") pod \"certified-operators-5x7kt\" (UID: \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\") " pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:31 crc kubenswrapper[4592]: I0929 18:19:31.019739 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1813154f-5125-48d1-a4f4-ab1b27bee5bf-catalog-content\") pod \"certified-operators-5x7kt\" (UID: \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\") " pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:31 crc kubenswrapper[4592]: I0929 18:19:31.020333 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1813154f-5125-48d1-a4f4-ab1b27bee5bf-catalog-content\") pod \"certified-operators-5x7kt\" (UID: \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\") " pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:31 crc kubenswrapper[4592]: I0929 18:19:31.021000 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1813154f-5125-48d1-a4f4-ab1b27bee5bf-utilities\") pod \"certified-operators-5x7kt\" (UID: \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\") " pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:31 crc kubenswrapper[4592]: I0929 18:19:31.043456 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xbgn\" (UniqueName: 
\"kubernetes.io/projected/1813154f-5125-48d1-a4f4-ab1b27bee5bf-kube-api-access-5xbgn\") pod \"certified-operators-5x7kt\" (UID: \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\") " pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:31 crc kubenswrapper[4592]: I0929 18:19:31.121269 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:31 crc kubenswrapper[4592]: E0929 18:19:31.169863 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:19:31 crc kubenswrapper[4592]: I0929 18:19:31.342871 4592 generic.go:334] "Generic (PLEG): container finished" podID="4cc986fa-6620-43ff-ae05-11c71e326035" containerID="a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" exitCode=0 Sep 29 18:19:31 crc kubenswrapper[4592]: I0929 18:19:31.343053 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" event={"ID":"4cc986fa-6620-43ff-ae05-11c71e326035","Type":"ContainerDied","Data":"a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2"} Sep 29 18:19:31 crc kubenswrapper[4592]: I0929 18:19:31.343226 4592 scope.go:117] "RemoveContainer" containerID="fb692c262e198dd5e7af83b00f4158ad42d9ceb33907aecb9725919fbdbcfca9" Sep 29 18:19:31 crc kubenswrapper[4592]: I0929 18:19:31.343915 4592 scope.go:117] "RemoveContainer" containerID="a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" Sep 29 18:19:31 crc kubenswrapper[4592]: E0929 18:19:31.344227 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:19:31 crc kubenswrapper[4592]: I0929 18:19:31.789041 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5x7kt"] Sep 29 18:19:32 crc kubenswrapper[4592]: I0929 18:19:32.361390 4592 generic.go:334] "Generic (PLEG): container finished" podID="1813154f-5125-48d1-a4f4-ab1b27bee5bf" containerID="08dae3d6290a437b92d49cc7a0089b6cb794c9a2f31fb22650f1ed1ff7e9a3a6" exitCode=0 Sep 29 18:19:32 crc kubenswrapper[4592]: I0929 18:19:32.361450 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7kt" event={"ID":"1813154f-5125-48d1-a4f4-ab1b27bee5bf","Type":"ContainerDied","Data":"08dae3d6290a437b92d49cc7a0089b6cb794c9a2f31fb22650f1ed1ff7e9a3a6"} Sep 29 18:19:32 crc kubenswrapper[4592]: I0929 18:19:32.361488 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7kt" event={"ID":"1813154f-5125-48d1-a4f4-ab1b27bee5bf","Type":"ContainerStarted","Data":"24c8a420a7a4387ee2bdf3849d7b631dcb3b8785b5698461cdd04bf139822360"} Sep 29 18:19:33 crc kubenswrapper[4592]: I0929 18:19:33.371477 4592 generic.go:334] "Generic (PLEG): container finished" 
podID="f8ef9825-c099-4d2c-9241-9eef9d20e90f" containerID="bd334d59de4bd75ba05c3b52a05c46b9c5956a30fe8770e4b5f2b3ed54e11054" exitCode=0 Sep 29 18:19:33 crc kubenswrapper[4592]: I0929 18:19:33.371516 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-mjks7/must-gather-dsd9f" event={"ID":"f8ef9825-c099-4d2c-9241-9eef9d20e90f","Type":"ContainerDied","Data":"bd334d59de4bd75ba05c3b52a05c46b9c5956a30fe8770e4b5f2b3ed54e11054"} Sep 29 18:19:33 crc kubenswrapper[4592]: I0929 18:19:33.371882 4592 scope.go:117] "RemoveContainer" containerID="bd334d59de4bd75ba05c3b52a05c46b9c5956a30fe8770e4b5f2b3ed54e11054" Sep 29 18:19:33 crc kubenswrapper[4592]: I0929 18:19:33.498284 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mjks7_must-gather-dsd9f_f8ef9825-c099-4d2c-9241-9eef9d20e90f/gather/0.log" Sep 29 18:19:34 crc kubenswrapper[4592]: I0929 18:19:34.381963 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7kt" event={"ID":"1813154f-5125-48d1-a4f4-ab1b27bee5bf","Type":"ContainerStarted","Data":"3ffaefc27a4ec58d7b17b1de6c2090a96726cfd880494e37cca93bd7e8a90ecd"} Sep 29 18:19:35 crc kubenswrapper[4592]: I0929 18:19:35.397952 4592 generic.go:334] "Generic (PLEG): container finished" podID="1813154f-5125-48d1-a4f4-ab1b27bee5bf" containerID="3ffaefc27a4ec58d7b17b1de6c2090a96726cfd880494e37cca93bd7e8a90ecd" exitCode=0 Sep 29 18:19:35 crc kubenswrapper[4592]: I0929 18:19:35.398863 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7kt" event={"ID":"1813154f-5125-48d1-a4f4-ab1b27bee5bf","Type":"ContainerDied","Data":"3ffaefc27a4ec58d7b17b1de6c2090a96726cfd880494e37cca93bd7e8a90ecd"} Sep 29 18:19:36 crc kubenswrapper[4592]: I0929 18:19:36.410277 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7kt" event={"ID":"1813154f-5125-48d1-a4f4-ab1b27bee5bf","Type":"ContainerStarted","Data":"dc44f7923025964cc17415143fe57a8668896b6f45af59f54267363db28ea85d"} Sep 29 18:19:36 crc kubenswrapper[4592]: I0929 18:19:36.435465 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5x7kt" podStartSLOduration=2.950967742 podStartE2EDuration="6.435448038s" podCreationTimestamp="2025-09-29 18:19:30 +0000 UTC" firstStartedPulling="2025-09-29 18:19:32.363964431 +0000 UTC m=+5302.511742152" lastFinishedPulling="2025-09-29 18:19:35.848444767 +0000 UTC m=+5305.996222448" observedRunningTime="2025-09-29 18:19:36.430217934 +0000 UTC m=+5306.577995626" watchObservedRunningTime="2025-09-29 18:19:36.435448038 +0000 UTC m=+5306.583225719" Sep 29 18:19:41 crc kubenswrapper[4592]: I0929 18:19:41.121740 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:41 crc kubenswrapper[4592]: I0929 18:19:41.122312 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:41 crc kubenswrapper[4592]: I0929 18:19:41.203871 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:41 crc kubenswrapper[4592]: I0929 18:19:41.519244 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:41 crc kubenswrapper[4592]: I0929 18:19:41.569069 4592 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5x7kt"] Sep 29 18:19:42 crc kubenswrapper[4592]: I0929 18:19:42.183199 4592 scope.go:117] "RemoveContainer" containerID="a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" Sep 29 18:19:42 crc kubenswrapper[4592]: E0929 18:19:42.183441 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:19:43 crc kubenswrapper[4592]: I0929 18:19:43.486576 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5x7kt" podUID="1813154f-5125-48d1-a4f4-ab1b27bee5bf" containerName="registry-server" containerID="cri-o://dc44f7923025964cc17415143fe57a8668896b6f45af59f54267363db28ea85d" gracePeriod=2 Sep 29 18:19:44 crc kubenswrapper[4592]: I0929 18:19:44.502336 4592 generic.go:334] "Generic (PLEG): container finished" podID="1813154f-5125-48d1-a4f4-ab1b27bee5bf" containerID="dc44f7923025964cc17415143fe57a8668896b6f45af59f54267363db28ea85d" exitCode=0 Sep 29 18:19:44 crc kubenswrapper[4592]: I0929 18:19:44.502382 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7kt" event={"ID":"1813154f-5125-48d1-a4f4-ab1b27bee5bf","Type":"ContainerDied","Data":"dc44f7923025964cc17415143fe57a8668896b6f45af59f54267363db28ea85d"} Sep 29 18:19:44 crc kubenswrapper[4592]: I0929 18:19:44.637166 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:44 crc kubenswrapper[4592]: I0929 18:19:44.694619 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xbgn\" (UniqueName: \"kubernetes.io/projected/1813154f-5125-48d1-a4f4-ab1b27bee5bf-kube-api-access-5xbgn\") pod \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\" (UID: \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\") " Sep 29 18:19:44 crc kubenswrapper[4592]: I0929 18:19:44.694673 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1813154f-5125-48d1-a4f4-ab1b27bee5bf-catalog-content\") pod \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\" (UID: \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\") " Sep 29 18:19:44 crc kubenswrapper[4592]: I0929 18:19:44.694772 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1813154f-5125-48d1-a4f4-ab1b27bee5bf-utilities\") pod \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\" (UID: \"1813154f-5125-48d1-a4f4-ab1b27bee5bf\") " Sep 29 18:19:44 crc kubenswrapper[4592]: I0929 18:19:44.695951 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1813154f-5125-48d1-a4f4-ab1b27bee5bf-utilities" (OuterVolumeSpecName: "utilities") pod "1813154f-5125-48d1-a4f4-ab1b27bee5bf" (UID: "1813154f-5125-48d1-a4f4-ab1b27bee5bf"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:19:44 crc kubenswrapper[4592]: I0929 18:19:44.700974 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1813154f-5125-48d1-a4f4-ab1b27bee5bf-kube-api-access-5xbgn" (OuterVolumeSpecName: "kube-api-access-5xbgn") pod "1813154f-5125-48d1-a4f4-ab1b27bee5bf" (UID: "1813154f-5125-48d1-a4f4-ab1b27bee5bf"). InnerVolumeSpecName "kube-api-access-5xbgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:19:44 crc kubenswrapper[4592]: I0929 18:19:44.796522 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xbgn\" (UniqueName: \"kubernetes.io/projected/1813154f-5125-48d1-a4f4-ab1b27bee5bf-kube-api-access-5xbgn\") on node \"crc\" DevicePath \"\"" Sep 29 18:19:44 crc kubenswrapper[4592]: I0929 18:19:44.796552 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1813154f-5125-48d1-a4f4-ab1b27bee5bf-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:19:45 crc kubenswrapper[4592]: I0929 18:19:45.215255 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1813154f-5125-48d1-a4f4-ab1b27bee5bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1813154f-5125-48d1-a4f4-ab1b27bee5bf" (UID: "1813154f-5125-48d1-a4f4-ab1b27bee5bf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:19:45 crc kubenswrapper[4592]: I0929 18:19:45.307405 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1813154f-5125-48d1-a4f4-ab1b27bee5bf-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:19:45 crc kubenswrapper[4592]: I0929 18:19:45.513105 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7kt" event={"ID":"1813154f-5125-48d1-a4f4-ab1b27bee5bf","Type":"ContainerDied","Data":"24c8a420a7a4387ee2bdf3849d7b631dcb3b8785b5698461cdd04bf139822360"} Sep 29 18:19:45 crc kubenswrapper[4592]: I0929 18:19:45.513185 4592 scope.go:117] "RemoveContainer" containerID="dc44f7923025964cc17415143fe57a8668896b6f45af59f54267363db28ea85d" Sep 29 18:19:45 crc kubenswrapper[4592]: I0929 18:19:45.513203 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5x7kt" Sep 29 18:19:45 crc kubenswrapper[4592]: I0929 18:19:45.546737 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5x7kt"] Sep 29 18:19:45 crc kubenswrapper[4592]: I0929 18:19:45.551521 4592 scope.go:117] "RemoveContainer" containerID="3ffaefc27a4ec58d7b17b1de6c2090a96726cfd880494e37cca93bd7e8a90ecd" Sep 29 18:19:45 crc kubenswrapper[4592]: I0929 18:19:45.556851 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5x7kt"] Sep 29 18:19:45 crc kubenswrapper[4592]: I0929 18:19:45.576083 4592 scope.go:117] "RemoveContainer" containerID="08dae3d6290a437b92d49cc7a0089b6cb794c9a2f31fb22650f1ed1ff7e9a3a6" Sep 29 18:19:47 crc kubenswrapper[4592]: I0929 18:19:47.199134 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1813154f-5125-48d1-a4f4-ab1b27bee5bf" path="/var/lib/kubelet/pods/1813154f-5125-48d1-a4f4-ab1b27bee5bf/volumes" Sep 29 18:19:48 crc kubenswrapper[4592]: I0929 18:19:48.247482 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-mjks7/must-gather-dsd9f"] Sep 29 18:19:48 crc kubenswrapper[4592]: I0929 18:19:48.248102 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-mjks7/must-gather-dsd9f" podUID="f8ef9825-c099-4d2c-9241-9eef9d20e90f" containerName="copy" containerID="cri-o://4243085c3cd9b7ba49f303477262f4e6b9a79c4bca979fc4dc0fcf76cd0713a4" gracePeriod=2 Sep 29 18:19:48 crc kubenswrapper[4592]: I0929 18:19:48.261656 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-mjks7/must-gather-dsd9f"] Sep 29 18:19:48 crc kubenswrapper[4592]: I0929 18:19:48.555674 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mjks7_must-gather-dsd9f_f8ef9825-c099-4d2c-9241-9eef9d20e90f/copy/0.log" Sep 29 18:19:48 crc kubenswrapper[4592]: I0929 18:19:48.559419 4592 generic.go:334] "Generic (PLEG): container finished" podID="f8ef9825-c099-4d2c-9241-9eef9d20e90f" containerID="4243085c3cd9b7ba49f303477262f4e6b9a79c4bca979fc4dc0fcf76cd0713a4" exitCode=143 Sep 29 18:19:48 crc kubenswrapper[4592]: I0929 18:19:48.749473 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mjks7_must-gather-dsd9f_f8ef9825-c099-4d2c-9241-9eef9d20e90f/copy/0.log" Sep 29 18:19:48 crc kubenswrapper[4592]: I0929 18:19:48.749793 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mjks7/must-gather-dsd9f" Sep 29 18:19:48 crc kubenswrapper[4592]: I0929 18:19:48.890027 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f8ef9825-c099-4d2c-9241-9eef9d20e90f-must-gather-output\") pod \"f8ef9825-c099-4d2c-9241-9eef9d20e90f\" (UID: \"f8ef9825-c099-4d2c-9241-9eef9d20e90f\") " Sep 29 18:19:48 crc kubenswrapper[4592]: I0929 18:19:48.890132 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7nj9\" (UniqueName: \"kubernetes.io/projected/f8ef9825-c099-4d2c-9241-9eef9d20e90f-kube-api-access-d7nj9\") pod \"f8ef9825-c099-4d2c-9241-9eef9d20e90f\" (UID: \"f8ef9825-c099-4d2c-9241-9eef9d20e90f\") " Sep 29 18:19:48 crc kubenswrapper[4592]: I0929 18:19:48.901425 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ef9825-c099-4d2c-9241-9eef9d20e90f-kube-api-access-d7nj9" (OuterVolumeSpecName: "kube-api-access-d7nj9") pod "f8ef9825-c099-4d2c-9241-9eef9d20e90f" (UID: "f8ef9825-c099-4d2c-9241-9eef9d20e90f"). InnerVolumeSpecName "kube-api-access-d7nj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:19:48 crc kubenswrapper[4592]: I0929 18:19:48.992007 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7nj9\" (UniqueName: \"kubernetes.io/projected/f8ef9825-c099-4d2c-9241-9eef9d20e90f-kube-api-access-d7nj9\") on node \"crc\" DevicePath \"\"" Sep 29 18:19:49 crc kubenswrapper[4592]: I0929 18:19:49.073425 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f8ef9825-c099-4d2c-9241-9eef9d20e90f-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "f8ef9825-c099-4d2c-9241-9eef9d20e90f" (UID: "f8ef9825-c099-4d2c-9241-9eef9d20e90f"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:19:49 crc kubenswrapper[4592]: I0929 18:19:49.093337 4592 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f8ef9825-c099-4d2c-9241-9eef9d20e90f-must-gather-output\") on node \"crc\" DevicePath \"\"" Sep 29 18:19:49 crc kubenswrapper[4592]: I0929 18:19:49.198239 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8ef9825-c099-4d2c-9241-9eef9d20e90f" path="/var/lib/kubelet/pods/f8ef9825-c099-4d2c-9241-9eef9d20e90f/volumes" Sep 29 18:19:49 crc kubenswrapper[4592]: I0929 18:19:49.569571 4592 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-mjks7_must-gather-dsd9f_f8ef9825-c099-4d2c-9241-9eef9d20e90f/copy/0.log" Sep 29 18:19:49 crc kubenswrapper[4592]: I0929 18:19:49.570104 4592 scope.go:117] "RemoveContainer" containerID="4243085c3cd9b7ba49f303477262f4e6b9a79c4bca979fc4dc0fcf76cd0713a4" Sep 29 18:19:49 crc kubenswrapper[4592]: I0929 18:19:49.570295 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-mjks7/must-gather-dsd9f" Sep 29 18:19:49 crc kubenswrapper[4592]: I0929 18:19:49.617692 4592 scope.go:117] "RemoveContainer" containerID="bd334d59de4bd75ba05c3b52a05c46b9c5956a30fe8770e4b5f2b3ed54e11054" Sep 29 18:19:56 crc kubenswrapper[4592]: I0929 18:19:56.183299 4592 scope.go:117] "RemoveContainer" containerID="a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" Sep 29 18:19:56 crc kubenswrapper[4592]: E0929 18:19:56.184108 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.145023 4592 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xb2tx"] Sep 29 18:20:08 crc kubenswrapper[4592]: E0929 18:20:08.146162 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ef9825-c099-4d2c-9241-9eef9d20e90f" containerName="gather" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.146179 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ef9825-c099-4d2c-9241-9eef9d20e90f" containerName="gather" Sep 29 18:20:08 crc kubenswrapper[4592]: E0929 18:20:08.146190 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ef9825-c099-4d2c-9241-9eef9d20e90f" containerName="copy" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.146199 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ef9825-c099-4d2c-9241-9eef9d20e90f" containerName="copy" Sep 29 18:20:08 crc kubenswrapper[4592]: E0929 18:20:08.146214 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1813154f-5125-48d1-a4f4-ab1b27bee5bf" containerName="extract-content" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.146223 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="1813154f-5125-48d1-a4f4-ab1b27bee5bf" containerName="extract-content" Sep 29 18:20:08 crc kubenswrapper[4592]: E0929 18:20:08.146242 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1813154f-5125-48d1-a4f4-ab1b27bee5bf" containerName="registry-server" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.146250 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="1813154f-5125-48d1-a4f4-ab1b27bee5bf" containerName="registry-server" Sep 29 18:20:08 crc kubenswrapper[4592]: E0929 18:20:08.146282 4592 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1813154f-5125-48d1-a4f4-ab1b27bee5bf" containerName="extract-utilities" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.146290 4592 state_mem.go:107] "Deleted CPUSet assignment" podUID="1813154f-5125-48d1-a4f4-ab1b27bee5bf" containerName="extract-utilities" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.146547 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ef9825-c099-4d2c-9241-9eef9d20e90f" containerName="copy" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.146560 4592 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ef9825-c099-4d2c-9241-9eef9d20e90f" containerName="gather" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.146585 4592 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="1813154f-5125-48d1-a4f4-ab1b27bee5bf" containerName="registry-server" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.148614 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.155798 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xb2tx"] Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.178254 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6z2ds\" (UniqueName: \"kubernetes.io/projected/9dc38e81-feef-48eb-a490-179e0d8d5250-kube-api-access-6z2ds\") pod \"redhat-marketplace-xb2tx\" (UID: \"9dc38e81-feef-48eb-a490-179e0d8d5250\") " pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.178345 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9dc38e81-feef-48eb-a490-179e0d8d5250-catalog-content\") pod \"redhat-marketplace-xb2tx\" (UID: \"9dc38e81-feef-48eb-a490-179e0d8d5250\") " pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.178385 4592 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9dc38e81-feef-48eb-a490-179e0d8d5250-utilities\") pod \"redhat-marketplace-xb2tx\" (UID: \"9dc38e81-feef-48eb-a490-179e0d8d5250\") " pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.184279 4592 scope.go:117] "RemoveContainer" containerID="a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" Sep 29 18:20:08 crc kubenswrapper[4592]: E0929 18:20:08.185022 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.279807 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9dc38e81-feef-48eb-a490-179e0d8d5250-catalog-content\") pod \"redhat-marketplace-xb2tx\" (UID: \"9dc38e81-feef-48eb-a490-179e0d8d5250\") " pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.279886 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9dc38e81-feef-48eb-a490-179e0d8d5250-utilities\") pod \"redhat-marketplace-xb2tx\" (UID: \"9dc38e81-feef-48eb-a490-179e0d8d5250\") " pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.280093 4592 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6z2ds\" (UniqueName: \"kubernetes.io/projected/9dc38e81-feef-48eb-a490-179e0d8d5250-kube-api-access-6z2ds\") pod \"redhat-marketplace-xb2tx\" (UID: \"9dc38e81-feef-48eb-a490-179e0d8d5250\") " pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 
18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.280328 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9dc38e81-feef-48eb-a490-179e0d8d5250-catalog-content\") pod \"redhat-marketplace-xb2tx\" (UID: \"9dc38e81-feef-48eb-a490-179e0d8d5250\") " pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.281486 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9dc38e81-feef-48eb-a490-179e0d8d5250-utilities\") pod \"redhat-marketplace-xb2tx\" (UID: \"9dc38e81-feef-48eb-a490-179e0d8d5250\") " pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.300289 4592 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6z2ds\" (UniqueName: \"kubernetes.io/projected/9dc38e81-feef-48eb-a490-179e0d8d5250-kube-api-access-6z2ds\") pod \"redhat-marketplace-xb2tx\" (UID: \"9dc38e81-feef-48eb-a490-179e0d8d5250\") " pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:08 crc kubenswrapper[4592]: I0929 18:20:08.486667 4592 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:09 crc kubenswrapper[4592]: I0929 18:20:09.000126 4592 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xb2tx"] Sep 29 18:20:09 crc kubenswrapper[4592]: I0929 18:20:09.789244 4592 generic.go:334] "Generic (PLEG): container finished" podID="9dc38e81-feef-48eb-a490-179e0d8d5250" containerID="2eb4f2bbbe0ff80d9906a48ca0bf242d08891b720032db9ad31405693ddf6e3d" exitCode=0 Sep 29 18:20:09 crc kubenswrapper[4592]: I0929 18:20:09.789354 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xb2tx" event={"ID":"9dc38e81-feef-48eb-a490-179e0d8d5250","Type":"ContainerDied","Data":"2eb4f2bbbe0ff80d9906a48ca0bf242d08891b720032db9ad31405693ddf6e3d"} Sep 29 18:20:09 crc kubenswrapper[4592]: I0929 18:20:09.789603 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xb2tx" event={"ID":"9dc38e81-feef-48eb-a490-179e0d8d5250","Type":"ContainerStarted","Data":"a177f28350199149041977a04e54c09221c5e2259e7ee89c7e8589eb9a97e4db"} Sep 29 18:20:11 crc kubenswrapper[4592]: I0929 18:20:11.808787 4592 generic.go:334] "Generic (PLEG): container finished" podID="9dc38e81-feef-48eb-a490-179e0d8d5250" containerID="16376af2333552e520e4d5ff286191edc32a74b86e4f71d39cdc19035c6b1ab2" exitCode=0 Sep 29 18:20:11 crc kubenswrapper[4592]: I0929 18:20:11.808969 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xb2tx" event={"ID":"9dc38e81-feef-48eb-a490-179e0d8d5250","Type":"ContainerDied","Data":"16376af2333552e520e4d5ff286191edc32a74b86e4f71d39cdc19035c6b1ab2"} Sep 29 18:20:12 crc kubenswrapper[4592]: I0929 18:20:12.822694 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xb2tx" event={"ID":"9dc38e81-feef-48eb-a490-179e0d8d5250","Type":"ContainerStarted","Data":"b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7"} Sep 29 18:20:12 crc kubenswrapper[4592]: I0929 18:20:12.849489 4592 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xb2tx" podStartSLOduration=2.406199652 
podStartE2EDuration="4.84946929s" podCreationTimestamp="2025-09-29 18:20:08 +0000 UTC" firstStartedPulling="2025-09-29 18:20:09.79220064 +0000 UTC m=+5339.939978371" lastFinishedPulling="2025-09-29 18:20:12.235470328 +0000 UTC m=+5342.383248009" observedRunningTime="2025-09-29 18:20:12.838281083 +0000 UTC m=+5342.986058764" watchObservedRunningTime="2025-09-29 18:20:12.84946929 +0000 UTC m=+5342.997246971" Sep 29 18:20:18 crc kubenswrapper[4592]: I0929 18:20:18.488257 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:18 crc kubenswrapper[4592]: I0929 18:20:18.489405 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:18 crc kubenswrapper[4592]: I0929 18:20:18.567228 4592 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:18 crc kubenswrapper[4592]: I0929 18:20:18.972095 4592 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:19 crc kubenswrapper[4592]: I0929 18:20:19.032017 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xb2tx"] Sep 29 18:20:20 crc kubenswrapper[4592]: I0929 18:20:20.908042 4592 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xb2tx" podUID="9dc38e81-feef-48eb-a490-179e0d8d5250" containerName="registry-server" containerID="cri-o://b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7" gracePeriod=2 Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.406165 4592 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.453955 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9dc38e81-feef-48eb-a490-179e0d8d5250-utilities\") pod \"9dc38e81-feef-48eb-a490-179e0d8d5250\" (UID: \"9dc38e81-feef-48eb-a490-179e0d8d5250\") " Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.454015 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6z2ds\" (UniqueName: \"kubernetes.io/projected/9dc38e81-feef-48eb-a490-179e0d8d5250-kube-api-access-6z2ds\") pod \"9dc38e81-feef-48eb-a490-179e0d8d5250\" (UID: \"9dc38e81-feef-48eb-a490-179e0d8d5250\") " Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.454081 4592 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9dc38e81-feef-48eb-a490-179e0d8d5250-catalog-content\") pod \"9dc38e81-feef-48eb-a490-179e0d8d5250\" (UID: \"9dc38e81-feef-48eb-a490-179e0d8d5250\") " Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.454897 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9dc38e81-feef-48eb-a490-179e0d8d5250-utilities" (OuterVolumeSpecName: "utilities") pod "9dc38e81-feef-48eb-a490-179e0d8d5250" (UID: "9dc38e81-feef-48eb-a490-179e0d8d5250"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.469749 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dc38e81-feef-48eb-a490-179e0d8d5250-kube-api-access-6z2ds" (OuterVolumeSpecName: "kube-api-access-6z2ds") pod "9dc38e81-feef-48eb-a490-179e0d8d5250" (UID: "9dc38e81-feef-48eb-a490-179e0d8d5250"). InnerVolumeSpecName "kube-api-access-6z2ds". PluginName "kubernetes.io/projected", VolumeGidValue "" Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.470036 4592 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9dc38e81-feef-48eb-a490-179e0d8d5250-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9dc38e81-feef-48eb-a490-179e0d8d5250" (UID: "9dc38e81-feef-48eb-a490-179e0d8d5250"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.556712 4592 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6z2ds\" (UniqueName: \"kubernetes.io/projected/9dc38e81-feef-48eb-a490-179e0d8d5250-kube-api-access-6z2ds\") on node \"crc\" DevicePath \"\"" Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.556896 4592 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9dc38e81-feef-48eb-a490-179e0d8d5250-catalog-content\") on node \"crc\" DevicePath \"\"" Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.556954 4592 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9dc38e81-feef-48eb-a490-179e0d8d5250-utilities\") on node \"crc\" DevicePath \"\"" Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.924911 4592 generic.go:334] "Generic (PLEG): container finished" podID="9dc38e81-feef-48eb-a490-179e0d8d5250" containerID="b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7" exitCode=0 Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.924972 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xb2tx" event={"ID":"9dc38e81-feef-48eb-a490-179e0d8d5250","Type":"ContainerDied","Data":"b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7"} Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.925493 4592 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xb2tx" event={"ID":"9dc38e81-feef-48eb-a490-179e0d8d5250","Type":"ContainerDied","Data":"a177f28350199149041977a04e54c09221c5e2259e7ee89c7e8589eb9a97e4db"} Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.925541 4592 scope.go:117] "RemoveContainer" containerID="b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7" Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.925008 4592 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xb2tx" Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.967109 4592 scope.go:117] "RemoveContainer" containerID="16376af2333552e520e4d5ff286191edc32a74b86e4f71d39cdc19035c6b1ab2" Sep 29 18:20:21 crc kubenswrapper[4592]: I0929 18:20:21.998253 4592 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xb2tx"] Sep 29 18:20:22 crc kubenswrapper[4592]: I0929 18:20:22.012068 4592 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xb2tx"] Sep 29 18:20:22 crc kubenswrapper[4592]: I0929 18:20:22.016917 4592 scope.go:117] "RemoveContainer" containerID="2eb4f2bbbe0ff80d9906a48ca0bf242d08891b720032db9ad31405693ddf6e3d" Sep 29 18:20:22 crc kubenswrapper[4592]: I0929 18:20:22.078012 4592 scope.go:117] "RemoveContainer" containerID="b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7" Sep 29 18:20:22 crc kubenswrapper[4592]: E0929 18:20:22.078910 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7\": container with ID starting with b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7 not found: ID does not exist" containerID="b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7" Sep 29 18:20:22 crc kubenswrapper[4592]: I0929 18:20:22.078966 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7"} err="failed to get container status \"b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7\": rpc error: code = NotFound desc = could not find container \"b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7\": container with ID starting with b0eb211a7717b17bcce576ab1d7fff410ea75cda60c2093d95081bdb3e2964f7 not found: ID does not exist" Sep 29 18:20:22 crc kubenswrapper[4592]: I0929 18:20:22.078998 4592 scope.go:117] "RemoveContainer" containerID="16376af2333552e520e4d5ff286191edc32a74b86e4f71d39cdc19035c6b1ab2" Sep 29 18:20:22 crc kubenswrapper[4592]: E0929 18:20:22.079614 4592 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16376af2333552e520e4d5ff286191edc32a74b86e4f71d39cdc19035c6b1ab2\": container with ID starting with 16376af2333552e520e4d5ff286191edc32a74b86e4f71d39cdc19035c6b1ab2 not found: ID does not exist" containerID="16376af2333552e520e4d5ff286191edc32a74b86e4f71d39cdc19035c6b1ab2" Sep 29 18:20:22 crc kubenswrapper[4592]: I0929 18:20:22.079680 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16376af2333552e520e4d5ff286191edc32a74b86e4f71d39cdc19035c6b1ab2"} err="failed to get container status \"16376af2333552e520e4d5ff286191edc32a74b86e4f71d39cdc19035c6b1ab2\": rpc error: code = NotFound desc = could not find container \"16376af2333552e520e4d5ff286191edc32a74b86e4f71d39cdc19035c6b1ab2\": container with ID starting with 16376af2333552e520e4d5ff286191edc32a74b86e4f71d39cdc19035c6b1ab2 not found: ID does not exist" Sep 29 18:20:22 crc kubenswrapper[4592]: I0929 18:20:22.079720 4592 scope.go:117] "RemoveContainer" containerID="2eb4f2bbbe0ff80d9906a48ca0bf242d08891b720032db9ad31405693ddf6e3d" Sep 29 18:20:22 crc kubenswrapper[4592]: E0929 18:20:22.080558 4592 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2eb4f2bbbe0ff80d9906a48ca0bf242d08891b720032db9ad31405693ddf6e3d\": container with ID starting with 2eb4f2bbbe0ff80d9906a48ca0bf242d08891b720032db9ad31405693ddf6e3d not found: ID does not exist" containerID="2eb4f2bbbe0ff80d9906a48ca0bf242d08891b720032db9ad31405693ddf6e3d" Sep 29 18:20:22 crc kubenswrapper[4592]: I0929 18:20:22.080602 4592 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2eb4f2bbbe0ff80d9906a48ca0bf242d08891b720032db9ad31405693ddf6e3d"} err="failed to get container status \"2eb4f2bbbe0ff80d9906a48ca0bf242d08891b720032db9ad31405693ddf6e3d\": rpc error: code = NotFound desc = could not find container \"2eb4f2bbbe0ff80d9906a48ca0bf242d08891b720032db9ad31405693ddf6e3d\": container with ID starting with 2eb4f2bbbe0ff80d9906a48ca0bf242d08891b720032db9ad31405693ddf6e3d not found: ID does not exist" Sep 29 18:20:23 crc kubenswrapper[4592]: I0929 18:20:23.184412 4592 scope.go:117] "RemoveContainer" containerID="a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" Sep 29 18:20:23 crc kubenswrapper[4592]: E0929 18:20:23.184881 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:20:23 crc kubenswrapper[4592]: I0929 18:20:23.205599 4592 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dc38e81-feef-48eb-a490-179e0d8d5250" path="/var/lib/kubelet/pods/9dc38e81-feef-48eb-a490-179e0d8d5250/volumes" Sep 29 18:20:36 crc kubenswrapper[4592]: I0929 18:20:36.183024 4592 scope.go:117] "RemoveContainer" containerID="a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" Sep 29 18:20:36 crc kubenswrapper[4592]: E0929 18:20:36.184073 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:20:47 crc kubenswrapper[4592]: I0929 18:20:47.183064 4592 scope.go:117] "RemoveContainer" containerID="a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" Sep 29 18:20:47 crc kubenswrapper[4592]: E0929 18:20:47.184199 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:20:58 crc kubenswrapper[4592]: I0929 18:20:58.183963 4592 scope.go:117] "RemoveContainer" containerID="a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" Sep 29 18:20:58 crc kubenswrapper[4592]: E0929 18:20:58.185190 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:21:10 crc kubenswrapper[4592]: I0929 18:21:10.185755 4592 scope.go:117] "RemoveContainer" containerID="a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" Sep 29 18:21:10 crc kubenswrapper[4592]: E0929 18:21:10.187064 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:21:24 crc kubenswrapper[4592]: I0929 18:21:24.009611 4592 scope.go:117] "RemoveContainer" containerID="71e340a9daf6da492e5dd7f54350ca9690747ca9023211a1e29a7eba99f63d98" Sep 29 18:21:25 crc kubenswrapper[4592]: I0929 18:21:25.184590 4592 scope.go:117] "RemoveContainer" containerID="a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" Sep 29 18:21:25 crc kubenswrapper[4592]: E0929 18:21:25.187181 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" Sep 29 18:21:40 crc kubenswrapper[4592]: I0929 18:21:40.188392 4592 scope.go:117] "RemoveContainer" containerID="a54c427e743e4f95adda1e3d032f046f9f6c0cc7511eb18f0203e3cba9cec9b2" Sep 29 18:21:40 crc kubenswrapper[4592]: E0929 18:21:40.189809 4592 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dfqzg_openshift-machine-config-operator(4cc986fa-6620-43ff-ae05-11c71e326035)\"" pod="openshift-machine-config-operator/machine-config-daemon-dfqzg" podUID="4cc986fa-6620-43ff-ae05-11c71e326035" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515066547106024457 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015066547106017374 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015066533747016525 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015066533750015467 5ustar corecore